It is possible to use FAST-XPD from the command line (or from any other HTTP client) via a basic REST API.
Three of the four classical CRUD operations are supported: inserting a new request (Create), getting the status of a specific request (Read), and aborting a specific request (Delete).
Please note: the JSON answers in the examples below are pretty-printed for readability.
Resource | Input parameters | Answer |
---|---|---|
POST /fastxpd/rest/requests (insert a new request) | Parameters as application/x-www-form-urlencoded request body; the Python example below shows a typical set (input_folder, time_begin, time_end, number_xy, skip_slices, z_output_point, from_run_number, to_run_number, email, user_prefix) | JSON answer with the request id upon successful submission, or error messages in case of wrong/missing parameters (the status, request_id and messages fields are read in the Python example below) |
GET /fastxpd/rest/requests/:request_id (get information about a specific request) | (none) | JSON answer, see the example below |
DELETE /fastxpd/rest/requests/:request_id (abort a specific request, only available for requests in status "Queued" or "Processing...") | (none) | JSON answer, see the sketch at the end of this page |

JSON answer example for the GET request:
{
"available_till": 1472639524,
"completed_at": 1472466724,
"completed_runs": 1,
"created_at": 1472466445,
"current_status": "Completed",
"files": {
"res_1": {
"filesize": 397,
"link": "https://in.xfel.eu/fastxpd/output/2016/08/57c40e0d88221/runs/PPROC-FAST2XY_2013.DAT"
},
"res_2": {
"filesize": 3169,
"link": "https://in.xfel.eu/fastxpd/output/2016/08/57c40e0d88221/runs/XFEL_S1_04.96keV_14.0GeV_0100pC_SASE_U_BLI_2014-02-01_FAST_TEST_LONG_PULSE_NAZ_2_readme.txt"
},
"zipfile_res": {
"description": "All output files except HDF5 one(s) in a single zip file",
"filesize": 3004234,
"link": "https://in.xfel.eu/fastxpd/output/2016/08/57c40e0d88221/test_fel_long_pulse_0000fs_res.zip"
}
},
"from_run_number": 1,
"input_folder": "XFEL_S1_04.96keV_14.0GeV_0100pC_SASE_U_BLI_2014-02-01_FAST_TEST_LONG_PULSE_NAZ_2",
"last_status": "Completed",
"nharms": 1,
"number_xy": 50,
"request_id": "57c40e0d88221",
"runs": {
"001": {
"files": [
{
"filesize": 397,
"link": "https://in.xfel.eu/fastxpd/output/2016/08/57c40e0d88221/runs/PPROC-FAST2XY_2013.DAT",
"name": "PPROC-FAST2XY_2013.DAT"
},
{
"filesize": 280,
"link": "https://in.xfel.eu/fastxpd/output/2016/08/57c40e0d88221/runs/XFEL_S1_04.96keV_14.0GeV_0100pC_SASE_U_BLI_2014-02-01_FAST_TEST_LONG_PULSE_NAZ_2_E10001030.RES",
"name": "XFEL_S1_04.96keV_14.0GeV_0100pC_SASE_U_BLI_2014-02-01_FAST_TEST_LONG_PULSE_NAZ_2_E10001030.RES"
},
{
"filesize": 2856370,
"link": "https://in.xfel.eu/fastxpd/output/2016/08/57c40e0d88221/runs/XFEL_S1_04.96keV_14.0GeV_0100pC_SASE_U_BLI_2014-02-01_FAST_TEST_LONG_PULSE_NAZ_2_FXY1_0001030.RES",
"name": "XFEL_S1_04.96keV_14.0GeV_0100pC_SASE_U_BLI_2014-02-01_FAST_TEST_LONG_PULSE_NAZ_2_FXY1_0001030.RES"
},
{
"filesize": 142814,
"link": "https://in.xfel.eu/fastxpd/output/2016/08/57c40e0d88221/runs/XFEL_S1_04.96keV_14.0GeV_0100pC_SASE_U_BLI_2014-02-01_FAST_TEST_LONG_PULSE_NAZ_2_PXY1_0001030.RES",
"name": "XFEL_S1_04.96keV_14.0GeV_0100pC_SASE_U_BLI_2014-02-01_FAST_TEST_LONG_PULSE_NAZ_2_PXY1_0001030.RES"
},
{
"filesize": 3169,
"link": "https://in.xfel.eu/fastxpd/output/2016/08/57c40e0d88221/runs/XFEL_S1_04.96keV_14.0GeV_0100pC_SASE_U_BLI_2014-02-01_FAST_TEST_LONG_PULSE_NAZ_2_readme.txt",
"name": "XFEL_S1_04.96keV_14.0GeV_0100pC_SASE_U_BLI_2014-02-01_FAST_TEST_LONG_PULSE_NAZ_2_readme.txt"
}
],
"output": " Searching file \n XFEL_S1_04.96keV_14.0GeV_0100pC_SASE_U_BLI_2014-02-01_FAST_TEST_LONG_PULSE_NAZ_\n 2_T0001030.RES\n Reading file header of reference file and allocating arrays \n XYMAX: 14.9822889435085 \n Input data:\n Time step of input data (fs) = 4.999999999999999E-003\n Mesh step (1) of input data (cm) = 6.952380952380952E-005\n Mesh step (2) of input data (cm) = 3.097127701597077E-004\n Output data:\n Time step of output data (fs) = 4.999999999999999E-002 Ratio: 10\n Number of nodes: 101\n Mesh step (cm) = 6.186941616036239E-004 Ratio: 8.89902561210692 \n XYMAX, XYMAX*RNORMR, DXY*RNORMR: 14.9822889435085 \n 3.093470808018119E-002 6.186941616036239E-004\n 34720667611.4517 \n Input file: \n XFEL_S1_04.96keV_14.0GeV_0100pC_SASE_U_BLI_2014-02-01_FAST_TEST_LONG_PULSE_NAZ_\n 2_T0001030.RES\n Output file: \n XFEL_S1_04.96keV_14.0GeV_0100pC_SASE_U_BLI_2014-02-01_FAST_TEST_LONG_PULSE_NAZ_\n 2_FXY1_0001030.RES Nh = 1\n Output file: \n XFEL_S1_04.96keV_14.0GeV_0100pC_SASE_U_BLI_2014-02-01_FAST_TEST_LONG_PULSE_NAZ_\n 2_E10001030.RES\n Number of slices: 10\n Pulse length: 0.500000000000000 \n Output file: \n XFEL_S1_04.96keV_14.0GeV_0100pC_SASE_U_BLI_2014-02-01_FAST_TEST_LONG_PULSE_NAZ_\n 2_PXY1_0001030.RES Nh = 1\n CPU time: 0.598909000000000 \n",
"status": "Processed"
}
},
"skip_slices": 10,
"time_begin": 0,
"time_end": 0.5,
"to_run_number": 1,
"total_runs": 1,
"updated_at": 1472466724,
"user_prefix": "test_fel_long_pulse_0000fs",
"z_output_point": 30
}
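The timestamp fields in the answer above (created_at, completed_at, updated_at, available_till) appear to be Unix timestamps, i.e. seconds since the epoch.

The following complete Python example submits a new request, polls its status every 10 seconds until it leaves the "Queued"/"Processing..." states, and then downloads and extracts the resulting HDF5 zip file, if present: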
#!/usr/bin/python
import requests
import time
import json
import zipfile

my_email_address = 'something@example.com'
my_path = '/tmp'  # where to download and extract result files
url = 'https://in.xfel.eu/fastxpd/rest/requests'
request_id = ''

# parameters of the new request, sent as form-encoded body of the POST
request_data = {
    'input_folder': 'XFEL_S1_04.96keV_12.0GeV_0020pC_SASE_U_BLI_2014-05-01_FAST',
    'time_begin': 0,
    'time_end': 6.5,
    'number_xy': 25,
    'skip_slices': 10,
    'z_output_point': 20,
    'from_run_number': 2,
    'to_run_number': 3,
    'email': my_email_address,
    'user_prefix': 'test_fel'
}

# submit the new request
response = requests.post(url, data=request_data)
if not response.ok:
    # if the response code is not OK (200), raise the resulting HTTP error with description
    response.raise_for_status()
answer = json.loads(response.content)
if answer['status'] > 0:
    # got an answer
    request_id = answer['request_id']
    status = 'Queued'
    print "Request id: %s" % request_id
else:
    # got an error
    print answer['messages']

# poll request status
if request_id:
    url_request = "%s/%s" % (url, request_id)
    while status == 'Queued' or status == 'Processing...':
        time.sleep(10)  # poll status every 10 seconds
        response = requests.get(url_request)
        answer = json.loads(response.content)
        status = answer['current_status']
        print "Current status: %s" % status
    if status == 'Completed':
        print "Request has been completed"
        # show available files
        files = answer['files']
        print "Available result files:"
        print files
        # download and extract HDF5 files if present
        if 'zipfile_hdf5' in files:
            zip_file_url = files['zipfile_hdf5']['link']
            download_path = "%s/%s" % (my_path, zip_file_url.split('/')[-1])
            r = requests.get(zip_file_url, stream=True, verify=False)
            print "Downloading %s to %s" % (zip_file_url, download_path)
            # download the zip file in chunks
            with open(download_path, 'wb') as f:
                for chunk in r.iter_content(1024):
                    f.write(chunk)
            print "File downloaded, extracting"
            # extract files
            z = zipfile.ZipFile(download_path, allowZip64=True)
            print z.namelist()
            z.extractall(my_path)
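A request that is still "Queued" or "Processing..." can be aborted with the DELETE resource from the table above. Below is a minimal sketch, assuming a request_id returned by a previous POST (the id shown is the one from the GET example above) and simply printing whatever JSON answer the server returns:

#!/usr/bin/python
import requests

url = 'https://in.xfel.eu/fastxpd/rest/requests'
request_id = '57c40e0d88221'  # id returned by a previous POST (example value)

# abort the request; only meaningful while it is "Queued" or "Processing..."
response = requests.delete("%s/%s" % (url, request_id))
if not response.ok:
    # raise the HTTP error if the request could not be aborted
    response.raise_for_status()
# print the JSON answer returned by the server
print response.content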