import time
import requests
import pyvo as vo
print("pyvo :     {:<12} (>1.3)".format(vo.__version__))
print("requests:  {:<12} (>2.27)".format(requests.__version__))
URL = 'https://gaia.aip.de/uws/simple-join-service'
TOKEN = '<token>'

session = requests.Session()
session.headers['Authorization'] = 'Token {}'.format(TOKEN)
# session.headers['Host'] = 'localhost:80'
service = vo.dal.DALService(URL, session=session)
service
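
# Optional sanity check (a sketch, not part of the original workflow): per
# the UWS specification a GET on the base URL returns the job list, so a
# 401/403 status here would indicate a bad or missing token.
response = service._session.get(service._baseurl)
print(response.status_code)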
job_id = '<job-id>'  # ID of the finished query job whose result contains the source_id column
join_table = 'gaiadr3.epoch_photometry'  # the table you wish to join against
data_structure = "INDIVIDUAL"  # how the joined output should be structured
response_format = "ecsv"  # format of the files in the result

schema, table = join_table.split('.')
output_filename = f"{job_id}.zip"
# create query object
query = service.create_query(job_id=job_id, 
                             column_name='source_id',
                             responseformat=response_format,
                             join_table=join_table,
                             data_structure=data_structure
                            )
query
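
# The request can be inspected before it is sent: pyvo's DALQuery subclasses
# dict, so the parameters are its items, and queryurl is the target URL.
print(query.queryurl)
print(dict(query))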
# submit job
response = query.submit(post=True)
if response.status_code == 200:
    job = vo.io.uws.parse_job(response.raw.read)
    print(job.phase)
else:
    print(response.content)
job.phase
# start job
response = service._session.post('{}/{}/phase'.format(service._baseurl, job.jobid), data={"PHASE": "RUN"}, stream=True)
job = vo.io.uws.parse_job(response.raw.read)
job.phase
# fetch job from url and jobid
response = service._session.get('{}/{}'.format(service._baseurl, job.jobid), stream=True)
job = vo.io.uws.parse_job(response.raw.read)
# fetch phase
response = service._session.get('{}/{}/phase'.format(service._baseurl, job.jobid))
response.content
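
# Instead of checking the phase once, you can poll until the job reaches a
# terminal state (a minimal sketch; the phase names follow the UWS 1.1 spec).
while job.phase not in ('COMPLETED', 'ERROR', 'ABORTED'):
    time.sleep(5)  # be gentle with the server
    response = service._session.get('{}/{}'.format(service._baseurl, job.jobid), stream=True)
    job = vo.io.uws.parse_job(response.raw.read)
print(job.phase)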
# abort job (optional: cancels a job that is still queued or executing)
response = service._session.post('{}/{}/phase'.format(service._baseurl, job.jobid), data={"PHASE": "ABORT"}, stream=True)
job = vo.io.uws.parse_job(response.raw.read)
job.phase
# download result (streamed in chunks to handle large files)
response = service._session.get(job.results[0].href, stream=True)
with open(output_filename, 'wb') as fd:  # 'wb' so reruns do not append to an existing archive
    for chunk in response.iter_content(chunk_size=64*1024):
        fd.write(chunk)

print(f"[Done] --> {output_filename}")
# archive job
response = service._session.delete('{}/{}'.format(service._baseurl, job.jobid), stream=True)
response
