"""
The datalink core and its numerous helper classes.
More on this in "Datalink Cores" in the reference documentation.
"""
#c Copyright 2008-2023, the GAVO project <gavo@ari.uni-heidelberg.de>
#c
#c This program is free software, covered by the GNU GPL. See the
#c COPYING file in the source distribution.
import contextlib
import itertools
import inspect
import os
import urllib.request, urllib.parse, urllib.error
from gavo import base
from gavo import formats
from gavo import rsc
from gavo import rscdef
from gavo import svcs
from gavo import utils
from gavo.formal import nevowc
from gavo.formats import votablewrite
from gavo.protocols import dlasync
from gavo.protocols import products
from gavo.protocols import soda
from gavo.protocols.soda import (FormatNow, DeliverNow, DatalinkFault,
DEFAULT_SEMANTICS)
from gavo.utils import fitstools
from gavo.utils import pyfits
from gavo.votable import V, modelgroups
from twisted.web import static
# IVOA standard identifiers declared in service descriptors for the
# datalink (links) and SODA (sync/async processing) endpoints.
LINKS_STANDARD_ID = "ivo://ivoa.net/std/datalink#links-1.1"
SYNC_STANDARD_ID = "ivo://ivoa.net/std/soda#sync-1.0"
ASYNC_STANDARD_ID = "ivo://ivoa.net/std/soda#async-1.0"

# shorthand used throughout this module
MS = base.makeStruct
class DatalinkDescriptorMixin:
    """some generally useful attributes that various code expects datalink
    descriptors to have.

    These are the makeLink/makeLinkFromFile methods, but also data (where
    SODA processors accumulate the data they will serve), forSemantics
    (the semantics we are trying to produce) and contentQualifier
    (the sort of data at the other end; only touch this if you expect
    your link to be sent through SAMP).
    """
    # data: set by the first data function, possibly transformed by later ones
    data = None
    # forSemantics: default semantics for links and faults made from this
    # descriptor; temporarily overridden via currentSemantics()
    forSemantics = DEFAULT_SEMANTICS
    # contentQualifier: nature of the linked data (only relevant for SAMP)
    contentQualifier = None

    def makeLink(self, url, **kwargs):
        """returns a LinkDef for this descriptor for url.

        kwargs are passed on to LinkDef and include, in particular,
        semantics, contentType, contentLength, description,
        localSemantics, and contentQualifier.
        """
        # (idiom fix: was ``not "semantics" in kwargs``)
        if "semantics" not in kwargs:
            kwargs["semantics"] = self.forSemantics
        return LinkDef(self.pubDID, url, **kwargs)

    def makeLinkFromFile(self, localPath, description, semantics=None,
            service=None, contentType=None, suppressMissing=False,
            localSemantics=None, contentQualifier=None):
        """returns a LinkDef for a local file.

        Arguments are as for LinkDef.fromFile, except you don't have
        to pass in service if you're using the datalink service itself
        to access the file; this method will try to find the service by
        itself.
        """
        if service is None:
            # steal the service from the calling proc's frame; this is
            # hacky, but it saves boilerplate in almost every RD.
            try:
                service = inspect.currentframe().f_back.f_locals["self"].parent
            except (KeyError, AttributeError):
                raise base.StructureError("Cannot infer service for datalink"
                    " file link. Pass an appropriate service manually.")

        if semantics is None:
            semantics = self.forSemantics

        return LinkDef.fromFile(localPath, description, semantics,
            service=service, contentType=contentType,
            suppressMissing=suppressMissing, localSemantics=localSemantics,
            contentQualifier=contentQualifier)

    @contextlib.contextmanager
    def currentSemantics(self, sem):
        """sets the default semantics for links and faults generated in
        the controlled block to sem.
        """
        oldSemantics = self.forSemantics
        self.forSemantics = sem
        try:
            yield
        finally:
            # restore even if the controlled block raised
            self.forSemantics = oldSemantics

    def cleanup(self):
        """this method is called at the end of a dlget run.

        Use it to, for instance, close any files you still have open. By
        default, it does nothing.
        """
class ProductDescriptor(DatalinkDescriptorMixin):
    """An encapsulation of information about some "product" (i.e., file).

    This is basically equivalent to a line in the product table; the
    arguments of the constructor are all available as same-named attributes.

    It also has an attribute data defaulting to None. DataGenerators
    set it, DataFilters potentially change it.

    There is also a method cleanup() that is called at the end of dlget
    processing; the default implementation does nothing, so there is no
    need to up-call when you override it from here.

    If you inherit from this class and you have a way to guess the
    size of what the descriptor describes, override the estimateSize()
    method. The default will return a file size if accessPath points
    to an existing file, None otherwise.
    """
    def __init__(self, pubDID, accref, accessPath, mime,
            owner=None, embargo=None, sourceTable=None, datalink=None,
            preview=None, preview_mime=None):
        # datalink is accepted so a products-table row can be passed in
        # wholesale, but it is not stored on the descriptor.
        self.pubDID = pubDID
        self.accref, self.accessPath, self.mime = accref, accessPath, mime
        self.owner, self.embargo, self.sourceTable = owner, embargo, sourceTable
        self.preview, self.previewMime = preview, preview_mime

    @classmethod
    def fromAccref(cls, pubDID, accref, accrefPrefix=None, **moreKWs):
        """returns a product descriptor for an access reference.

        If an accrefPrefix is passed in, an AuthenticationFault (for want
        of something better fitting) is returned when the accref doesn't
        start with accrefPrefix.

        moreKWs are passed on to the constructor, overriding anything
        coming from the products table.
        """
        if accrefPrefix and not accref.startswith(accrefPrefix):
            return DatalinkFault.AuthenticationFault(pubDID,
                "This Datalink service not available"
                " with this pubDID", semantics="#this")

        kwargs = products.RAccref(accref).productsRow
        kwargs.update(moreKWs)
        return cls(pubDID, **kwargs)

    def estimateSize(self):
        """returns the size of the file accessPath points to, or None.

        None is returned when accessPath is not a plain string path or
        when the file cannot be stat-ed.
        """
        if isinstance(self.accessPath, str):
            candPath = os.path.join(base.getConfig("inputsDir"), self.accessPath)
            try:
                return os.path.getsize(candPath)
            # fixed: this was a bare except, which would also swallow
            # KeyboardInterrupt and programming errors; getsize only
            # raises OSError for missing/inaccessible files.
            except OSError:
                # fall through to returning None
                pass
        return None
class FITSProductDescriptor(ProductDescriptor):
    """A SODA descriptor for FITS files.

    On top of the normal product descriptor, this has an attribute hdr
    containing a copy of the image header, and a method
    changingAxis (see there).

    There's also an attribute dataIsPristine that must be set to false
    if changes have been made. The formatter will spit out the original
    data otherwise, ignoring your changes.

    Finally, there's a slices attribute provided explained in
    soda#fits_doWCSCutout that can be used by data functions running before
    it to do cutouts.

    Append any files you have open during dlget processing to self.toClose.
    The cleanup method makes sure they are closed before continuing.

    The FITSProductDescriptor is constructed like a normal ProductDescriptor.
    """
    def __init__(self, *args, **kwargs):
        # files registered here get closed by cleanup() at the end of dlget
        self.toClose = []
        # qnd ("quick and dirty"): just scan the primary header rather
        # than having pyfits open the whole file.
        qnd = kwargs.pop("qnd", True)
        ProductDescriptor.__init__(self, *args, **kwargs)
        self.imageExtind = 0
        # NOTE(review): imageExtind was just set to 0, so the second
        # condition is always true here; presumably a hook for code
        # changing imageExtind between these lines -- confirm intent.
        if qnd and self.imageExtind==0:
            with open(os.path.join(
                    base.getConfig("inputsDir"), self.accessPath), "rb") as f:
                self.hdr = utils.readPrimaryHeaderQuick(f,
                    maxHeaderBlocks=100)
        else:
            # full pyfits parse; also lets fitstools pick the actual
            # image extension.
            hdus = pyfits.open(os.path.join(
                base.getConfig("inputsDir"), self.accessPath))
            self.imageExtind = fitstools.fixImageExtind(
                hdus, self.imageExtind)
            self.hdr = hdus[self.imageExtind].header
            hdus.close()
        self.slices = []
        self.dataIsPristine = True
        # FITS axis indices already constrained by some parameter
        self._axesTouched = set()

    def changingAxis(self, axisIndex, parName):
        """must be called before cutting out along axisIndex.

        axisIndex is a FITS (1-based) axis index, parName the name of the
        parameter that causes the cutout.

        This will simply return if nobody has called changingAxis with that index
        before and raise a ValidationError otherwise. Data functions doing a cutout
        must call this before doing so; if they don't the cutout will probably be
        wrong when two conflicting constraints are given.
        """
        if axisIndex in self._axesTouched:
            raise base.ValidationError("Attempt to cut out along axis %d that"
                " has been modified before."%axisIndex, parName)
        self._axesTouched.add(axisIndex)

    def cleanup(self):
        # close whatever data functions have registered in toClose
        for closable in self.toClose:
            closable.close()
class DLFITSProductDescriptor(FITSProductDescriptor):
    """A SODA descriptor for FITS files with datalink product paths.

    Use is as descClass in //soda#fits_genDesc when the product table
    has a datalink as the product.
    """
    def __init__(self, *args, **kwargs):
        # With a datalink in the product table, accessPath does not point
        # to the actual file; the file lives at the accref below inputsDir,
        # so rewrite accessPath accordingly before constructing the base.
        inputsDir = base.getConfig("inputsDir")
        kwargs["accessPath"] = os.path.join(inputsDir, kwargs["accref"])
        FITSProductDescriptor.__init__(self, *args, **kwargs)
def getFITSDescriptor(pubDID, accrefPrefix=None,
        cls=FITSProductDescriptor, qnd=False):
    """returns a datalink descriptor for a FITS file.

    This is the implementation of fits_genDesc and should probably be
    reused when making more specialised descriptors.

    pubDID must follow this site's standard pubDID scheme; otherwise
    a NotFoundFault is returned. accrefPrefix and qnd are passed on
    to cls.fromAccref.
    """
    try:
        accref = rscdef.getAccrefFromStandardPubDID(pubDID)
    except ValueError:
        return DatalinkFault.NotFoundFault(pubDID,
            "Not a pubDID from this site.")

    return cls.fromAccref(pubDID, accref, accrefPrefix, qnd=qnd)
class _File(static.File):
    """a twisted static.File serving a fixed, caller-supplied media type.

    (A plain static.File would guess the type from the file name.)
    """
    def __init__(self, path, mediaType):
        super().__init__(path)
        self.encoding = None
        self.type = mediaType
class _TemporaryFile(_File):
    """A nevow resource that spits out a file and then deletes it.

    This is a helper class for DataFunctions and DataFormatters, available
    there as TemporaryFile.
    """
    def render(self, request):
        # remove the file when the request finishes, on success *and*
        # failure (hence addBoth).
        request.notifyFinish().addBoth(
            self._cleanup)
        return _File.render(self, request)

    def _cleanup(self, result):
        # remove() comes from static.File; result is passed through
        # unchanged so the deferred chain is undisturbed.
        self.remove()
        return result
class DescriptorGenerator(rscdef.ProcApp):
    """A procedure application for making product descriptors for PUBDIDs

    Despite the name, a descriptor generator has to *return* (not yield)
    a descriptor instance. While this could be anything, it is recommended
    to derive custom classes from protocols.datalink.ProductDescriptor, which
    exposes essentially the columns from DaCHS' product table as attributes.

    This is what you get when you don't define a descriptor generator
    in your datalink core.

    Before writing your own, see if one of the predefined descriptor
    generators work for you; see `Descriptor Generators`_ below.

    The following names are available to the code:

    - pubDID -- the pubDID to be resolved
    - args -- all the arguments that came in from the web
      (these should not usually be necessary for making the descriptor
      and are completely unparsed at this point)
    - FITSProductDescriptor -- the base class of FITS product descriptors
    - DLFITSProductDescriptor -- the same, just for when the product table
      has a datalink.
    - ProductDescriptor -- a base class for your own custom descriptors
    - DatalinkFault -- use this when flagging failures
    - soda -- contents of the soda module for convenience

    If you made your pubDID using the ``getStandardPubDID`` rowmaker function,
    and you need no additional logic within the descriptor,
    the default (//soda#fromStandardPubDID) should do.

    If you need to derive custom descriptor classes, you can see the base
    class under the name ProductDescriptor; there's also
    FITSProductDescriptor and DatalinkFault in each proc's namespace.

    If your Descriptor does not actually refer to something in the
    product table, it is likely that you want to set the descriptor's
    ``suppressAutoLinks`` attribute to True. This will stop DaCHS
    from attempting to add automatic #this and #preview links.
    """
    name_ = "descriptorGenerator"
    requiredType = "descriptorGenerator"
    # the compiled procedure is called as proc(pubDID, args)
    formalArgs = "pubDID, args"

    # extra names injected into every descriptor generator's namespace
    additionalNamesForProcs = {
        "FITSProductDescriptor": FITSProductDescriptor,
        "DLFITSProductDescriptor": DLFITSProductDescriptor,
        "ProductDescriptor": ProductDescriptor,
        "getFITSDescriptor": getFITSDescriptor,
        "DatalinkFault": DatalinkFault,
        "soda": soda,
    }
class LinkDef:
    """a definition of a datalink related document.

    These are constructed at least with:

    - the pubDID (as a string)
    - the access URL (as a string)

    In addition, we accept the remaining column names from
    `//datalink#dlresponse`_ as keyword arguments, except we use
    camel case here, i.e.,
    ``description``, ``semantics``, ``contentType``, ``contentLength``,
    ``localSemantics``.

    You usually do not want to construct LinkDefs directly. Use
    the makeLink or makeLinkFromFile methods on descriptors instead.
    Also, define semantics via the attribute on `element metaMaker`_
    whenever you can; that will improve error messages.
    """
    def __init__(self, pubDID, accessURL,
            serviceType=None,
            errorMessage=None,
            description=None,
            semantics=DEFAULT_SEMANTICS,
            contentType=None,
            contentLength=None,
            localSemantics=None,
            contentQualifier=None):
        ID = pubDID #noflake: used in locals()
        del pubDID
        # dlRow keeps all constructor arguments (locals() also catches
        # self, which asDict simply ignores) for serialisation in asDict.
        self.dlRow = locals()

    @classmethod
    def fromFile(cls, localPath, description, semantics,
            service, contentType=None, localSemantics=None,
            contentQualifier=None,
            suppressMissing=False):
        """constructs a LinkDef based on a local file.

        You must give localPath (which may be resdir-relative); description and
        semantics are mandatory, too. ContentType and contentSize will normally
        be determined by DaCHS.

        You must also pass in the service used to retrieve the file. This
        must allow the static renderer and have a staticData property. It should
        normally be the datalink service itself, which in a metaMaker
        is accessible as self.parent.parent. It is, however, legal
        to reference other suitable services (use self.parent.rd.getById or
        base.resolveCrossId)

        If you pass suppressMissing=True, a link to a non-existing file
        will be skipped rather than create a missing datalink.
        """
        baseDir = service.rd.resdir
        localPath = os.path.join(baseDir, localPath)
        # the descriptor is stolen from the calling proc's stack frame
        pubDID = utils.stealVar("descriptor").pubDID
        staticPath = os.path.join(baseDir,
            service.getProperty("staticData"))

        if not os.path.isfile(localPath):
            if suppressMissing:
                return None
            else:
                return DatalinkFault.NotFoundFault(pubDID, "No file"
                    " for linked item", semantics=semantics, description=description)
        elif not os.access(localPath, os.R_OK):
            # fixed: this used to call the non-existent AutorizationFault;
            # the DataLink fault name is AuthorizationFault.
            return DatalinkFault.AuthorizationFault(pubDID, "Linked"
                " item not readable", semantics=semantics, description=description)

        try:
            svcPath = utils.getRelativePath(localPath, staticPath)
        except ValueError:
            # fixed: the accessURL positional argument was missing here,
            # which made this raise a TypeError instead of producing the
            # intended error link.
            return LinkDef(pubDID, None, errorMessage="FatalFault: Linked item"
                " not accessible through the given service",
                semantics=semantics, description=description,
                localSemantics=localSemantics)

        ext = os.path.splitext(localPath)[-1]
        contentType = (contentType
            or static.File.contentTypes.get(ext, "application/octet-stream"))

        return cls(pubDID,
            service.getURL("static")+"/"+svcPath,
            description=description, semantics=semantics,
            contentType=contentType,
            contentLength=os.path.getsize(localPath),
            localSemantics=localSemantics,
            contentQualifier=contentQualifier)

    def asDict(self):
        """returns the link definition in a form suitable for ingestion
        in //datalink#dlresponse.
        """
        return {
            "ID": self.dlRow["ID"],
            "access_url": self.dlRow["accessURL"],
            "service_def": self.dlRow["serviceType"],
            "error_message": self.dlRow["errorMessage"],
            "description": self.dlRow["description"],
            "semantics": self.dlRow["semantics"],
            "content_type": self.dlRow["contentType"],
            "content_length": self.dlRow["contentLength"],
            "local_semantics": self.dlRow["localSemantics"],
            "content_qualifier": self.dlRow["contentQualifier"]}
class ProcLinkDef:
    """a definition of a datalink processing service other than self's dlget.

    Whenever your datalink service allows a dlget, a descriptor for that
    will be generated based on the keys returned by the metamaker. Sometimes,
    you want to provide a second processing service within a single datalink
    document. In that case, yield a ProcLinkDef object constructed
    with a dlget-allowing datalink service in its second argument. As usual
    for LinkDefs, the first argument must be the DID for which to generate
    the service.

    The description of this link will be the description of that service.
    """
    def __init__(self, pubDID, dlSvc):
        if (not isinstance(dlSvc, svcs.Service)
                or not "dlget" in dlSvc.allowed):
            raise base.StructureError("ProcLinkDef must be constructed"
                " with a dlget-renderable service.")

        self.pubDID = pubDID
        # svcId is only filled in by asVOT; asDict raises until then
        self.svcId = None
        self.description = base.getMetaText(dlSvc, "description",
            default="Another interactive service on this dataset.")

        # this is for stealing by adaptForRenderer
        args = {"ID": [self.pubDID]} #noflake: for stealing
        dlget = svcs.getRenderer("dlget")
        # adapt the foreign service's core so its inputKeys reflect
        # what dlget would accept for this pubDID
        dlgetCore = dlSvc.core.adaptForRenderer(
            dlget, {"dbLimit": None})

        self.serviceDescriptor = _ServiceDescriptor(
            self.pubDID,
            dlgetCore.inputKeys,
            "dlget",
            dlSvc)

    def asDict(self):
        """returns a datalink row for this service.

        This is called by //datalink#make_response for the table row.
        """
        if self.svcId is None:
            raise base.Error("You need to arrange for asVOT to be called first"
                f" on this {self.__class__}.")

        return {
            "ID": self.pubDID,
            "service_def": self.svcId,
            "description": self.description,
            "semantics": "#proc"}

    def asVOT(self, ctx):
        """returns a VOTable fragment for the service descriptor

        This called by runForMeta. It currently constructs an actual
        Datalink core for every pubDID, which is *relatively* expensive.
        If you find your datalink service is prohibitively slow, we may
        want to re-think this.
        """
        # memorise the id so asDict can reference this RESOURCE
        self.svcId = ctx.getOrMakeIdFor(
            self.serviceDescriptor, suggestion="ext_svc")
        return self.serviceDescriptor.asVOT(ctx)
class _ServiceDescriptor(object):
    """An internal descriptor for one of our services.

    These are serialized into service resources in VOTables.
    Basically, these collect input keys, a pubDID, as well as any other
    data we might need in service definition.
    """
    def __init__(self, pubDID, inputKeys, rendName, dlSvc):
        self.pubDID, self.inputKeys = pubDID, inputKeys
        self.rendName, self.dlSvc = rendName, dlSvc
        if self.pubDID:
            # if we're fixed to a specific pubDID, reflect that in the ID
            # field -- this is how clients know which dataset to pull
            # from datalink documents.
            # NOTE(review): this replaces entries of the caller-supplied
            # inputKeys list in place; callers should not share that list.
            for index, ik in enumerate(self.inputKeys):
                if ik.name=="ID":
                    ik = ik.copy(None)
                    ik.set(pubDID)
                    self.inputKeys[index] = ik

    @property
    def description(self):
        # service description with a generic fallback
        return base.getMetaText(self.dlSvc,
            "description",
            default="An interactive service on this dataset.")

    @property
    def title(self):
        # service title with a generic fallback based on the service id
        return base.getMetaText(self.dlSvc,
            "title",
            default="Processing service with id {}".format(self.dlSvc.id))

    def asVOT(self, ctx, linkIdTo=None):
        """returns VOTable stanxml for a description of this service.

        This is a RESOURCE as required by Datalink.

        linkIdTo is used to support data access descriptors embedded
        in discovery queries. It is the id of the column containing
        the identifiers. SSA can already provide this.
        """
        paramsByName, stcSpecs = {}, set()
        for param in self.inputKeys:
            paramsByName[param.name] = param
            if param.stc:
                stcSpecs.add(param.stc)

        # STC groups reference columns by name; hand out (and register)
        # ids for them, marking the referenced key as a PARAM.
        def getIdFor(colRef):
            colRef.toParam = True
            return ctx.getOrMakeIdFor(paramsByName[colRef.dest],
                suggestion=colRef.dest)

        res = V.RESOURCE(ID=ctx.getOrMakeIdFor(self, suggestion="proc_svc"),
                name=self.dlSvc.id,
                type="meta",
                utype="adhoc:service")[
            V.DESCRIPTION[self.description],
            V.INFO(name="title", value=self.title),
            [modelgroups.marshal_STC(ast, getIdFor)[0]
                for ast in stcSpecs],
            V.PARAM(arraysize="*", datatype="char",
                name="accessURL", ucd="meta.ref.url",
                value=self.dlSvc.getURL(self.rendName))]

        # declare the IVOA standardID, either from service meta or
        # derived from the renderer being described.
        standardId = base.getMetaText(self.dlSvc, "standardID", default=None)
        if standardId is None:
            standardId = {
                "dlasync": ASYNC_STANDARD_ID,
                "dlget": SYNC_STANDARD_ID}.get(self.rendName)
        if standardId:
            res[
                V.PARAM(arraysize="*", datatype="char",
                    name="standardID", value=standardId)]

        inputParams = V.GROUP(name="inputParams")
        res = res[inputParams]

        for ik in self.inputKeys:
            param = ctx.addID(ik,
                votablewrite.makeFieldFromColumn(ctx, V.PARAM, ik))
            if linkIdTo and ik.name=="ID":
                param = param(ref=linkIdTo)
            if ik.hasProperty("defaultForForm"):
                # annotate form defaults so clients can pre-fill them
                param[
                    V.LINK(action="rdf", content_role="#pre-set",
                        value=str(ik.getProperty("defaultForForm")))]
            inputParams[param]

        return res
class DataFunction(rscdef.ProcApp):
    """A procedure application that generates or modifies data in a processed
    data service.

    All these operate on the data attribute of the product descriptor.
    The first data function plays a special role: It *must* set the data
    attribute (or raise some appropriate exception), or a server error will
    be returned to the client.

    What is returned depends on the service, but typically it's going to
    be a table or products.*Product instance.

    Data functions can shortcut if it's evident that further data functions
    can only mess up (i.e., if they do something bad with the data attribute);
    you should not shortcut if you just *think* it makes no sense to
    further process your output.

    To shortcut, raise either of FormatNow (falls through to the formatter,
    which is usually less useful) or DeliverNow (directly returns the
    data attribute; this can be used to return arbitrary chunks of data).

    The following names are available to the code:

    - descriptor -- whatever the DescriptorGenerator returned
    - args -- all the arguments that came in from the web.

    In addition to the usual names available to ProcApps, data functions have:

    - FormatNow -- exception to raise to go directly to the formatter
    - DeliverNow -- exception to raise to skip all further formatting
      and just deliver what's currently in descriptor.data
    - File(path, type) -- if you just want to return a file on disk, pass
      its path and media type to File and assign the result to
      descriptor.data.
    - TemporaryFile(path,type) -- as File, but the disk file is
      unlinked after use
    - makeData -- the rsc.makeData function
    - soda -- the protocols.soda module
    - DataFromURL(url) -- returns a product that 301-redirects to url
      when rendered (products.RemoteProduct.fromURL)
    """
    name_ = "dataFunction"
    requiredType = "dataFunction"
    # the compiled procedure is called as proc(descriptor, args)
    formalArgs = "descriptor, args"

    # extra names injected into every data function's namespace
    additionalNamesForProcs = {
        "FormatNow": FormatNow,
        "DeliverNow": DeliverNow,
        "File": _File,
        "TemporaryFile": _TemporaryFile,
        "makeData": rsc.makeData,
        "soda": soda,
        "DataFromURL": products.RemoteProduct.fromURL,
    }
class DatalinkCoreBase(svcs.Core, base.ExpansionDelegator):
    """Basic functionality for datalink cores.

    This is pulled out of the datalink core proper as it is used without
    the complicated service interface sometimes, e.g., by SSAP.
    """
    _descriptorGenerator = base.StructAttribute("descriptorGenerator",
        default=base.NotGiven,
        childFactory=DescriptorGenerator,
        description="Code that takes a PUBDID and turns it into a"
        " product descriptor instance. If not given,"
        " //soda#fromStandardPubDID will be used.",
        copyable=True)

    _metaMakers = base.StructListAttribute("metaMakers",
        childFactory=MetaMaker,
        description="Code that takes a data descriptor and either"
        " updates input key options or yields related data.",
        copyable=True)

    _dataFunctions = base.StructListAttribute("dataFunctions",
        childFactory=DataFunction,
        # (fixed typo: "generates of processes")
        description="Code that generates or processes data for this"
        " core. The first of these plays a special role in that it"
        " must set descriptor.data, the others need not do anything"
        " at all.",
        copyable=True)

    _dataFormatter = base.StructAttribute("dataFormatter",
        default=base.NotGiven,
        childFactory=DataFormatter,
        description="Code that turns descriptor.data into a nevow resource"
        " or a mime, content pair. If not given, the renderer will be"
        " returned descriptor.data itself (which will probably not usually"
        " work).",
        copyable=True)

    _inputKeys = rscdef.ColumnListAttribute("inputKeys",
        childFactory=svcs.InputKey,
        description="A parameter to one of the proc apps (data functions,"
        " formatters) active in this datalink core; no specific relation"
        " between input keys and procApps is supposed; all procApps are passed"
        " all arguments. Conventionally, you will write the input keys in"
        " front of the proc apps that interpret them.",
        copyable=True)

    # This tells the context grammar to fail if someone tries to pass
    # in parameters we don't have input keys for.
    rejectExtras = True

    def completeElement(self, ctx):
        """fills in defaults (struct building infrastructure).

        This supplies the default descriptor generator and formatter,
        appends the mandatory ID input key, and wires up the datalink
        response table as queriedTable.
        """
        if self.descriptorGenerator is base.NotGiven:
            self.descriptorGenerator = MS(DescriptorGenerator,
                procDef=base.resolveCrossId("//soda#fromStandardPubDID"))

        if self.dataFormatter is base.NotGiven:
            self.dataFormatter = MS(DataFormatter,
                procDef=base.caches.getRD("//soda").getById("trivialFormatter"))

        self.inputKeys.append(MS(svcs.InputKey, name="ID", type="text",
            ucd="meta.id;meta.main",
            multiplicity="multiple",
            std=True,
            description="The publisher DID of the dataset of interest"))

        if self.inputTable is base.NotGiven:
            self.inputTable = MS(svcs.InputTD, inputKeys=self.inputKeys)

        # this is a cheat for service.getTableSet to pick up the datalink
        # table. If we fix this for TAP, we should fix it here, too.
        self.queriedTable = base.caches.getRD("//datalink").getById(
            "dlresponse")

        super().completeElement(ctx)

    def _iterAutoLinks(self, descriptor, service):
        """yields automatic links for descriptor.

        That's #this and #preview unless some special conditions apply.
        """
        if (not isinstance(descriptor, ProductDescriptor)
                or hasattr(descriptor, "suppressAutoLinks")):
            return

        # if the accref is a datalink document, go through dlget itself.
        if descriptor.mime=="application/x-votable+xml;content=datalink":
            if isinstance(descriptor, DLFITSProductDescriptor):
                # this is perhaps a bit insane, but I like image/fits
                # for non-tabular FITS files, and that's what DLFITS
                # deals with.
                mediaType = "image/fits"
            else:
                # fixed: a stray trailing comma here used to turn
                # mediaType into a 1-tuple rather than a string.
                mediaType = formats.guessMediaType(descriptor.accref)

            yield LinkDef(descriptor.pubDID,
                service.getURL("dlget")+"?ID=%s"%urllib.parse.quote(
                    descriptor.pubDID),
                description="The full dataset.",
                contentType=mediaType,
                contentLength=descriptor.estimateSize(),
                contentQualifier=getattr(descriptor, "contentQualifier", None),
                semantics="#this")

        else:
            yield LinkDef(descriptor.pubDID,
                products.makeProductLink(descriptor.accref),
                description="The full dataset.",
                contentType=descriptor.mime,
                contentLength=descriptor.estimateSize(),
                contentQualifier=getattr(descriptor, "contentQualifier", None),
                semantics="#this")

        if getattr(descriptor, "preview", None):
            if descriptor.preview.startswith("http"):
                # deliver literal links directly
                previewLink = descriptor.preview
            else:
                # It's either AUTO or a local path; in both cases, let
                # the products infrastructure worry about it.
                previewLink = products.makeProductLink(
                    products.RAccref(descriptor.accref,
                        inputDict={"preview": True}))

            yield LinkDef(descriptor.pubDID,
                previewLink,
                description="A preview for the dataset.",
                contentType=descriptor.previewMime,
                semantics="#preview")

    def getDatalinksResource(self, ctx, service):
        """returns a VOTable RESOURCE element with the data links.

        This does not contain the actual service definition elements, but it
        does contain references to them.

        You must pass in a VOTable context object ctx (for the management
        of ids). If this is the entire content of the VOTable, use
        votablewrite.VOTableContext() there.
        """
        internalLinks = []

        # one #proc row per declared service endpoint
        internalLinks.extend(
            LinkDef(
                s.pubDID,
                None,
                serviceType=ctx.getOrMakeIdFor(s),
                description=s.description,
                semantics="#proc")
            for s in self.datalinkEndpoints)

        # for all descriptors that are products, make a full dataset
        # available through the data access, possibly also adding a preview.
        for d in self.descriptors:
            for link in self._iterAutoLinks(d, service):
                internalLinks.append(link)

        data = rsc.makeData(
            base.caches.getRD("//datalink").getById("make_response"),
            forceSource=self.datalinkLinks+internalLinks+self.errors)
        data.setMeta("_type", "results")
        data.contributingMetaCarriers.append(service)

        return votablewrite.makeResource(
            votablewrite.VOTableContext(tablecoding="td"),
            data)
[docs]class DatalinkCore(DatalinkCoreBase):
"""A core for processing datalink and processed data requests.
The input table of this core is dynamically generated from its
metaMakers; it makes no sense at all to try and override it.
See `Datalink and SODA`_ for more information.
In contrast to "normal" cores, one of these is made (and destroyed)
for each datalink request coming in. This is because the interface
of a datalink service depends on the request's value(s) of ID.
The datalink core can produce both its own metadata and data generated.
It is the renderer's job to tell them apart.
"""
name_ = "datalinkCore"
datalinkType = "application/x-votable+xml;content=datalink"
overflowed = False
workerSystem = dlasync.DL_WORKER
# the core will be specially and non-cacheably adapted for these
# renderers (ssap.xml is in here for legacy getData):
datalinkAdaptingRenderers = frozenset([
"form", "dlget", "dlmeta", "dlasync", "ssap.xml"])
def _getPubDIDs(self, args):
    """returns a list of pubDIDs from args["ID"].

    args is supposed to be a nevow request.args-like dict, where the PubDIDs
    are taken from the ID parameter. If it's atomic, it'll be expanded into
    a list. If it's not present, an empty list is returned (callers like
    adaptForDescriptors then decide whether a missing ID is an error).
    """
    pubDIDs = args.get("ID")
    if not pubDIDs:
        pubDIDs = []
    elif not isinstance(pubDIDs, list):
        # atomic value: wrap it
        pubDIDs = [pubDIDs]
    return pubDIDs
def adaptForDescriptors(self, renderer, descriptors, maxRec):
    """returns a core for renderer and a sequence of ProductDescriptors.

    This method is mainly for helping adaptForRenderer. Do read the
    docstring there.
    """
    # only declare dlget/dlasync endpoints the embedding service
    # actually allows; the set is stolen from upstack.
    try:
        allowedForSvc = set(utils.stealVar("allowedRendsForStealing"))
    except ValueError:
        allowedForSvc = []

    overflowed = False
    linkDefs, endpoints, errors = [], [], []
    for curInd, descriptor in enumerate(descriptors):
        if isinstance(descriptor, DatalinkFault):
            errors.append(descriptor)

        else:
            lds, inputKeys, lerrs = self.getMetaForDescriptor(descriptor)
            linkDefs.extend(lds)
            errors.extend(lerrs)

            # ssap expects the first renderer here to be dlget, so don't
            # remove it or move it back; also: only declare services
            # if they accept more than just ID; with just ID, it's obviously
            # just a bytestream generator for #this rather than a proper
            # service.
            for rendName in ["dlget", "dlasync"]:
                if len(inputKeys)>1 and rendName in allowedForSvc:
                    endpoints.append(
                        _ServiceDescriptor(
                            descriptor.pubDID,
                            inputKeys,
                            rendName,
                            self.parent))

        # MAXREC handling (dlmeta only): crop remaining descriptors
        # and flag the overflow once the row budget is exhausted.
        if (renderer.name=="dlmeta"
                and maxRec is not None
                and len(linkDefs)+len(errors)>=maxRec):
            descriptors[curInd+1:] = []
            overflowed = True
            break

    # dispatch on whether we're making metadata (case 1) or actual
    # data (case 2)
    inputKeys = self.inputKeys[:]
    if renderer.name=="dlmeta":
        pass # RESPONSEFORMAT and friends added through pql#DALIPars

    else:
        # in dlget, we don't actually know what to do with multiple
        # descriptors. But we will fault out if the last (and usually
        # only one) has a problem.
        if not descriptors:
            raise base.ValidationError("ID is mandatory with dlget",
                "ID")

        # If there are multiple endpoints, we take the parameters from
        # the last one. If there are none, we keep the ID of the
        # main service interface.
        if endpoints:
            inputKeys = endpoints[-1].inputKeys

        if isinstance(descriptors[-1], DatalinkFault):
            descriptors[-1].raiseException()

    res = self.change(inputTable=MS(svcs.InputTD,
        inputKeys=inputKeys, exclusive=True),
        inputKeys=inputKeys)

    # again dispatch on meta or data, this time as regards what to run.
    if renderer.name=="dlmeta":
        res.run = res.runForMeta
    else:
        res.run = res.runForData

    # the adapted core's interface depends on the request; never cache it
    res.nocache = True
    res.datalinkLinks = linkDefs
    res.datalinkEndpoints = endpoints
    res.descriptors = descriptors
    res.errors = errors
    res.overflowed = overflowed

    return res
def adaptForRenderer(self, renderer, queryMeta):
    """returns a core for a specific product.

    The ugly thing about datalink in DaCHS' architecture is that its
    interface (in terms of, e.g., inputKeys' values children) depends
    on the arguments themselves, specifically the pubDID.

    The workaround is to abuse the renderer-specific getCoreFor,
    ignore the renderer and instead steal an "args" variable from
    somewhere upstack.  Nasty, but for now an acceptable solution.

    It is particularly important to never let the service cache the
    cores returned for the dl* renderers; hence the "nocache" magic.

    This tries to generate all datalink-relevant metadata in one go
    and avoid calling the descriptorGenerator(s) more than once per
    pubDID.  It therefore adds datalinkLinks, datalinkEndpoints,
    and descriptors attributes.  These are used later
    in either metadata generation or data processing.

    An additional complication is MAXREC; adaptForDescriptors
    will crop the descriptor list if more than MAXREC links (incl.
    errors) are generated if the renderer is dlmeta.  On dlget,
    MAXREC is ignored; we don't really do multi-ID dlget right
    now, anyway.

    The latter will in general use only the last pubDID passed in.
    Therefore, this last pubDID determines the service interface
    for now.  Perhaps we should be joining the inputKeys in some way,
    though, e.g., if we want to allow retrieving multiple datasets
    in a tar file?  Or to re-use the same service for all pubdids?
    """
    # if we're not speaking real datalink, return right away (this will
    # be cached, so this must never happen for actual data)
    if not renderer.name in self.datalinkAdaptingRenderers:
        return self

    try:
        # grab the request arguments from a caller's frame; see the
        # docstring for why this stunt is (regrettably) necessary.
        args = utils.stealVar("args")
        if not isinstance(args, dict):
            # again, we're not being called in a context with a pubdid
            raise ValueError("No pubdid")
        args = utils.CaseSemisensitiveDict(args)
    except ValueError:
        # no arguments found: decide later on whether to fault out.
        args = {"ID": []}

    pubDIDs = self._getPubDIDs(args)
    descGen = self.descriptorGenerator.compile(self)
    descriptors = []

    # build one descriptor (or DatalinkFault) per pubDID
    for pubDID in pubDIDs:
        try:
            desc = descGen(pubDID, args)
            if desc is None:
                raise base.NotFoundError(pubDID, "dataset",
                    "this site's data holdings")
            if isinstance(desc, DatalinkFault):
                # fix non-informational semantics to #this, because
                # that's what making a descriptor is almost always about.
                if desc.semantics==soda.DEFAULT_SEMANTICS:
                    desc.semantics = "#this"
            descriptors.append(desc)
        except svcs.RedirectBase:
            # let through redirects even for dlmeta; these are rendered
            # further up in the call chain.
            raise
        except Exception as ex:
            # if we're dlget, just let exceptions through (e.g., authentication),
            # also with a view to pushing out useful error messages.
            if renderer.name!="dlmeta":
                raise
            else:
                # In dlmeta, convert to fault rows.
                if isinstance(ex, base.NotFoundError):
                    descriptors.append(DatalinkFault.NotFoundFault(pubDID,
                        utils.safe_str(ex), semantics="#this"))
                else:
                    if base.DEBUG:
                        base.ui.notifyError("Error in datalink descriptor generator: %s"%
                            utils.safe_str(ex))
                    descriptors.append(DatalinkFault.Fault(pubDID,
                        utils.safe_str(ex), semantics="#this"))

    return self.adaptForDescriptors(
        renderer, descriptors, queryMeta["dbLimit"])
def _iterAccessResources(self, ctx, service):
"""iterates over the VOTable RESOURCE elements necessary for
the datalink rows produced by service.
"""
for dlSvc in self.datalinkEndpoints:
yield dlSvc.asVOT(ctx)
for linkDef in self.datalinkLinks:
# normal LinkDefs can also contribute VOTable material directly;
# presumably, that'll always be RESOURCE blocks of dlget services,
# but who knows: perhaps we'll one day want to return an extra
# info or something?
if hasattr(linkDef, "asVOT"):
yield linkDef.asVOT(ctx)
def runForData(self, service, inputTable, queryMeta):
    """returns a data set processed according to inputTable's parameters.

    This is what is run when the core is called through a data-returning
    renderer.  The (last) descriptor is piped through the data functions
    and then handed to the data formatter, unless a data function
    shortcuts the pipeline by raising FormatNow or DeliverNow.
    """
    try:
        args = inputTable.getParamDict()
        if not self.dataFunctions:
            raise base.DataError("This datalink service cannot process data")

        # the first data function must create descriptor.data; the
        # remaining ones may then transform it.
        descriptor = self.descriptors[-1]
        self.dataFunctions[0].compile(self)(descriptor, args)

        if descriptor.data is None:
            raise base.ReportableError("Internal Error: a first data function did"
                " not create data.")

        for func in self.dataFunctions[1:]:
            try:
                func.compile(self)(descriptor, args)
            except FormatNow:
                # skip any further data functions and format right away
                break
            except DeliverNow:
                # hand out descriptor.data as-is, bypassing the formatter
                return descriptor.data

        res = self.dataFormatter.compile(self)(descriptor, args)
        return res
    finally:
        # always release per-descriptor temporaries and break the core's
        # circular references; the core must not be used after this.
        self.descriptors[-1].cleanup()
        self.finalize()
def finalize(self):
    """breaks circular references to make the garbage collector's job
    easier.

    The core will no longer function once this has been called.
    """
    # drop memoized material on the core itself, then on every
    # processor it may have compiled.
    utils.forgetMemoized(self)
    for processor in list(self.metaMakers)+list(self.dataFunctions):
        utils.forgetMemoized(processor)
    utils.forgetMemoized(self.descriptorGenerator)
    if self.dataFormatter:
        utils.forgetMemoized(self.dataFormatter)
    self.breakCircles()
    # make sure nobody runs this core again by accident
    self.run = None
def makeDatalinkServiceDescriptor(ctx, service, tableDef, columnName):
    """returns a datalink descriptor for a datalink (dlmeta) service.

    What's returned is gavo.votable elements.

    ctx is a votablewrite VOTableContext that manages the IDs of the
    elements involved, service is the datalink service as a svc.Service
    instance, tableDef is the rscdef.TableDef instance with the rows the
    datalink service operates on, and columnName names the column within
    table to take the datalink's ID parameter from.
    """
    description = V.DESCRIPTION[
        base.getMetaText(service, "description",
            default="A Datalink service to retrieve the data set"
            " as well as additional related files, plus possibly services"
            " for server-side processing.", propagate=False)]

    # the ID input parameter references the table column the datalink
    # IDs come from, so clients can fill it from table rows.
    idColumn = tableDef.getColumnByName(columnName)
    inputParams = V.GROUP(name="inputParams")[
        V.PARAM(name="ID", datatype="char", arraysize="*",
            ref=ctx.getOrMakeIdFor(idColumn, suggestion=columnName),
            ucd="meta.id;meta.main")]

    return V.RESOURCE(type="meta", utype="adhoc:service", name=service.id)[
        description,
        V.PARAM(name="standardID", datatype="char", arraysize="*",
            value=LINKS_STANDARD_ID),
        V.PARAM(name="accessURL", datatype="char", arraysize="*",
            value=service.getURL("dlmeta")),
        inputParams]