gmzsebastian / FLEET

Finding Luminous and Exotic Extragalactic Transients
MIT License
9 stars 4 forks source link

Catalogue query error #5

Closed LydiaMak closed 2 weeks ago

LydiaMak commented 2 weeks ago

Hi,

I get this error, but I am not sure what the issue is or what the workaround would be. Any help would be great!

Cheers, Lydia

Exception                                 Traceback (most recent call last)
File ~/miniconda3/lib/python3.10/site-packages/fleet_pipe-1.0.0-py3.10.egg/FLEET/catalog.py:1100, in get_catalog(object_name, ra_deg, dec_deg, search_radius, dust_map, reimport_catalog)
   1099 try:
-> 1100     data_catalog_in = query_relevant(ra_deg, dec_deg, search_radius, dust_map = dust_map)
   1101 except:

File ~/miniconda3/lib/python3.10/site-packages/fleet_pipe-1.0.0-py3.10.egg/FLEET/catalog.py:992, in query_relevant(ra_deg, dec_deg, search_radius, dust_map)
    991 # Query Catalogs
--> 992 catalog_3pi  = query_3pi (ra_deg, dec_deg, search_radius)
    993 catalog_SDSS = query_SDSS(ra_deg, dec_deg, search_radius)

File ~/miniconda3/lib/python3.10/site-packages/fleet_pipe-1.0.0-py3.10.egg/FLEET/catalog.py:284, in query_3pi(ra_deg, dec_deg, search_radius)
    283 jobs    = mastcasjobs.MastCasJobs(userid=wsid, password=password, context="PanSTARRS_DR1")
--> 284 results = jobs.quick(la_query, task_name="python cone search")
    286 # For New format

File ~/miniconda3/lib/python3.10/site-packages/mastcasjobs-0.0.5-py3.10.egg/mastcasjobs/__init__.py:152, in MastCasJobs.quick(self, q, context, task_name, system, astropy)
    132 """
    133 Run a quick job. Like CasJobs method but adds astropy option.
    134 
   (...)
    150 
    151 """
--> 152 results = super(MastCasJobs,self).quick(q, context=context, task_name=task_name, system=system)
    153 if astropy:

File ~/miniconda3/lib/python3.10/site-packages/casjobs-0.0.2-py3.10.egg/casjobs.py:175, in CasJobs.quick(self, q, context, task_name, system)
    173 params = {"qry": q, "context": context, "taskname": task_name,
    174         "isSystem": system}
--> 175 r = self._send_request("ExecuteQuickJob", params=params)
    176 return self._parse_single(r.text, "string")

File ~/miniconda3/lib/python3.10/site-packages/casjobs-0.0.2-py3.10.egg/casjobs.py:103, in CasJobs._send_request(self, job_type, params)
    102     msg = self._parse_error(r.text)
--> 103     raise Exception("%s failed with status: %d\n%s"%(job_type, code, msg))
    104 else:

Exception: ExecuteQuickJob failed with status: 500
A network-related or instance-specific error occurred while establishing a connection to SQL Server. The server was not found or was not accessible. Verify that the instance name is correct and that SQL Server is configured to allow remote connections. (provider: Named Pipes Provider, error: 40 - Could not open a connection to SQL Server) ---> System.Data.SqlClient.SqlException: A network-related or instance-specific error occurred while establishing a connection to SQL Server. The server was not found or was not accessible. Verify that the instance name is correct and that SQL Server is configured to allow remote connections. (provider: Named Pipes Provider, error: 40 - Could not open a connection to SQL Server) 

During handling of the above exception, another exception occurred:

Exception                                 Traceback (most recent call last)
Cell In[34], line 6
      4 for object_name in ts[int(r[i].split(',')[0]):int(r[i].split(',')[1])]:
      5     index = ts.index(object_name)
----> 6     s.append(predict_SLSN(object_name,ra[index],dec[index],plot_lightcurve = True))

File ~/miniconda3/lib/python3.10/site-packages/fleet_pipe-1.0.0-py3.10.egg/FLEET/classify.py:560, in predict_SLSN(object_name_in, ra_in, dec_in, redshift, acceptance_radius, import_ZTF, import_OSC, import_local, import_lightcurve, reimport_catalog, search_radius, dust_map, Pcc_filter, Pcc_filter_alternative, star_separation, star_cut, date_range, late_phase, n_walkers, n_steps, n_cores, model, training_days, hostless_cut, sorting_state, clean, SMOTE_state, clf_state, n_estimators, max_depth, feature_set, neighbors, recalculate_nature, classifier, n_samples, object_class, plot_lightcurve, do_observability, save_features, use_glade)
    555     return table.Table()
    559 ########## Catalog Operations ##########
--> 560 data_catalog_out, catalog_exists = get_catalog(object_name, ra_deg, dec_deg, search_radius, dust_map, reimport_catalog)
    561 # If there's no catalog, return empty
    562 if len(data_catalog_out) == 0:

File ~/miniconda3/lib/python3.10/site-packages/fleet_pipe-1.0.0-py3.10.egg/FLEET/catalog.py:1102, in get_catalog(object_name, ra_deg, dec_deg, search_radius, dust_map, reimport_catalog)
   1100     data_catalog_in = query_relevant(ra_deg, dec_deg, search_radius, dust_map = dust_map)
   1101 except:
-> 1102     data_catalog_in = query_relevant(ra_deg, dec_deg, search_radius, dust_map = dust_map)
   1103 if data_catalog_in:
   1104     pass

File ~/miniconda3/lib/python3.10/site-packages/fleet_pipe-1.0.0-py3.10.egg/FLEET/catalog.py:992, in query_relevant(ra_deg, dec_deg, search_radius, dust_map)
    974 '''
    975 Query SDSS, 3PI, and the available dust maps, and then merge
    976 them into one big catalog
   (...)
    988 One Astropy table with the merged catalog
    989 '''
    991 # Query Catalogs
--> 992 catalog_3pi  = query_3pi (ra_deg, dec_deg, search_radius)
    993 catalog_SDSS = query_SDSS(ra_deg, dec_deg, search_radius)
    995 if len(catalog_3pi) + len(catalog_SDSS) == 0:

File ~/miniconda3/lib/python3.10/site-packages/fleet_pipe-1.0.0-py3.10.egg/FLEET/catalog.py:284, in query_3pi(ra_deg, dec_deg, search_radius)
    282 print('Querying 3PI ...')
    283 jobs    = mastcasjobs.MastCasJobs(userid=wsid, password=password, context="PanSTARRS_DR1")
--> 284 results = jobs.quick(la_query, task_name="python cone search")
    286 # For New format
    287 if type(results) != str:

File ~/miniconda3/lib/python3.10/site-packages/mastcasjobs-0.0.5-py3.10.egg/mastcasjobs/__init__.py:152, in MastCasJobs.quick(self, q, context, task_name, system, astropy)
    131 def quick(self, q, context=None, task_name="quickie", system=False, astropy=True):
    132     """
    133     Run a quick job. Like CasJobs method but adds astropy option.
    134 
   (...)
    150 
    151     """
--> 152     results = super(MastCasJobs,self).quick(q, context=context, task_name=task_name, system=system)
    153     if astropy:
    154         return MastCasJobs.convert_quick_table(results)

File ~/miniconda3/lib/python3.10/site-packages/casjobs-0.0.2-py3.10.egg/casjobs.py:175, in CasJobs.quick(self, q, context, task_name, system)
    172     context = self.context
    173 params = {"qry": q, "context": context, "taskname": task_name,
    174         "isSystem": system}
--> 175 r = self._send_request("ExecuteQuickJob", params=params)
    176 return self._parse_single(r.text, "string")

File ~/miniconda3/lib/python3.10/site-packages/casjobs-0.0.2-py3.10.egg/casjobs.py:103, in CasJobs._send_request(self, job_type, params)
    101 if hasattr(r,'text') and r.text:
    102     msg = self._parse_error(r.text)
--> 103     raise Exception("%s failed with status: %d\n%s"%(job_type, code, msg))
    104 else:
    105     raise Exception("%s failed with status: %d (no additional information)"%(job_type, code))

Exception: ExecuteQuickJob failed with status: 500
A network-related or instance-specific error occurred while establishing a connection to SQL Server. The server was not found or was not accessible. Verify that the instance name is correct and that SQL Server is configured to allow remote connections. (provider: Named Pipes Provider, error: 40 - Could not open a connection to SQL Server) ---> System.Data.SqlClient.SqlException: A network-related or instance-specific error occurred while establishing a connection to SQL Server. The server was not found or was not accessible. Verify that the instance name is correct and that SQL Server is configured to allow remote connections. (provider: Named Pipes Provider, error: 40 - Could not open a connection to SQL Server) 
LydiaMak commented 2 weeks ago

It was fixed. It was probably due to requesting too much data.