This file is indexed.

/usr/share/pyshared/rdflib/sparql/sparqlGraph.py is in python-rdflib 2.4.2-1ubuntu1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

from rdflib.Graph import Graph, ConjunctiveGraph
# NOTE: check_subject() and check_object(), used by the cluster methods
# below, are neither defined nor imported in this module; unless they are
# supplied from elsewhere in rdflib.sparql, calling clusterForward() or
# clusterBackward() raises a NameError.

class SPARQLGraph(object):
    """
    A subclass of Graph with a few extra SPARQL bits.
    """
    SPARQL_DATASET=0
    NAMED_GRAPH=1
    __slots__ = ("graphVariable","DAWG_DATASET_COMPLIANCE","identifier","graphKind","graph")
    def __init__(self, graph, graphVariable = None, dSCompliance = False):
        # graph variable names are stored without their leading '?'
        assert not graphVariable or graphVariable[0]!='?',repr(graphVariable)
        self.graphVariable = graphVariable
        self.DAWG_DATASET_COMPLIANCE = dSCompliance
        self.graphKind=None
        if graph is not None:
            self.graph = graph # TODO
            #self.store = graph.store
            if isinstance(graph,ConjunctiveGraph):
                # a SPARQL dataset: use the default context's identifier
                self.graphKind = self.SPARQL_DATASET
                self.identifier = graph.default_context.identifier
            else:
                self.graphKind = self.NAMED_GRAPH
                self.identifier = graph.identifier
        #super(SPARQLGraph, self).__init__(store, identifier)

    def setupGraph(self,store,graphKind=None):
        # Re-create the wrapped graph (e.g. after unpickling, which does not
        # restore self.graph).  gKind is called as gKind(store, identifier),
        # so it must be a graph class such as Graph or ConjunctiveGraph;
        # note that self.graphKind holds the integer constants set in
        # __init__, so a class has to be passed in explicitly.
        gKind = graphKind or self.graphKind
        self.graph = gKind(store,self.identifier)

    def __reduce__(self):
        # Pickle support: unpickling calls SPARQLGraph(None, graphVariable,
        # dSCompliance) and then __setstate__() with the tuple returned by
        # __getstate__().  The wrapped graph itself is not pickled; a store
        # has to be reattached afterwards via setupGraph().
        return (SPARQLGraph,
                (None,
                 self.graphVariable,
                 self.DAWG_DATASET_COMPLIANCE),
                self.__getstate__())

    def __getstate__(self):
        return (self.graphVariable,
                self.DAWG_DATASET_COMPLIANCE,
                self.identifier)#,
                #self.graphKind)

    def __setstate__(self, arg):
        #gVar,flag,identifier,gKind=arg
        gVar,flag,identifier=arg
        self.graphVariable=gVar
        self.DAWG_DATASET_COMPLIANCE=flag
        self.identifier=identifier
        #self.graphKind=gKind
        #self.graph=Graph(store,identifier)

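    # A minimal pickling round-trip sketch (hypothetical usage, not part of
    # this module).  The wrapped graph is dropped on pickling, so a store is
    # reattached on load; the graph class is passed explicitly because
    # __setstate__ does not restore graphKind:
    #
    #   import pickle
    #   sg = SPARQLGraph(Graph())
    #   restored = pickle.loads(pickle.dumps(sg))
    #   restored.setupGraph(sg.graph.store, Graph)
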
    ##############################################################################################################
    # Clustering methods
    def _clusterForward(self,seed,Cluster) :
        """Cluster the triple store: from a seed, transitively get all
        properties and objects in the direction of the arcs.

        @param seed: RDFLib Resource

        @param Cluster: a L{SPARQLGraph} instance that has to be
        expanded with the new arcs
        """
        try :
            # get all predicate and object pairs for the seed;
            # if a triple is not yet in the new cluster, add it and
            # recurse on its predicate and object
            for (p,o) in self.graph.predicate_objects(seed) :
                if (seed,p,o) not in Cluster.graph :
                    Cluster.graph.add((seed,p,o))
                    self._clusterForward(p,Cluster)
                    self._clusterForward(o,Cluster)
        except Exception :
            # seeds that cannot be traversed (e.g. literals) end the recursion
            pass


    def clusterForward(self,seed,Cluster=None) :
        """
        Cluster the triple store: from a seed, transitively get all
        properties and objects in the direction of the arcs.

        @param seed: RDFLib Resource

        @param Cluster: another L{SPARQLGraph} instance; if None, a new
        one will be created. The subgraph will be added to this graph.

        @returns: The triple store containing the cluster

        @rtype: L{SPARQLGraph}
        """
        if Cluster is None :
            # a SPARQLGraph needs a graph to wrap, so start from an empty one
            Cluster = SPARQLGraph(Graph())

        # This will raise an exception if the seed is not kosher
        # (e.g. a literal, which cannot be a subject)
        check_subject(seed)
        self._clusterForward(seed,Cluster)
        return Cluster


    def _clusterBackward(self,seed,Cluster) :
        """Cluster the triple store: from a seed, transitively get all
        subjects and properties against the direction of the arcs.

        @param seed: RDFLib Resource

        @param Cluster: a L{SPARQLGraph} instance that has to be
        expanded with the new arcs
        """
        try :
            # get all subject and predicate pairs pointing at the seed;
            # if a triple is not yet in the new cluster, add it and
            # recurse on its subject and predicate
            for (s,p) in self.graph.subject_predicates(seed) :
                if (s,p,seed) not in Cluster.graph :
                    Cluster.graph.add((s,p,seed))
                    self._clusterBackward(s,Cluster)
                    self._clusterBackward(p,Cluster)
        except Exception :
            # seeds that cannot be traversed end the recursion
            pass

    def clusterBackward(self,seed,Cluster=None) :
        """
        Cluster the triple store: from a seed, transitively get all
        subjects and properties 'backward', i.e., following the links
        back in the graph.

        @param seed: RDFLib Resource

        @param Cluster: another L{SPARQLGraph} instance; if None, a new
        one will be created. The subgraph will be added to this graph.

        @returns: The triple store containing the cluster

        @rtype: L{SPARQLGraph}
        """
        if Cluster is None :
            Cluster = SPARQLGraph(Graph())

        # This will raise an exception if the seed is not kosher
        check_object(seed)
        self._clusterBackward(seed,Cluster)
        return Cluster

    def cluster(self,seed) :
        """
        Cluster up and down, by summing up the forward and backward
        clusters.

        @param seed: RDFLib Resource

        @returns: The triple store containing the cluster

        @rtype: L{SPARQLGraph}
        """
        # merge the two clusters triple by triple; SPARQLGraph itself
        # does not define __add__
        result = self.clusterForward(seed)
        for triple in self.clusterBackward(seed).graph :
            result.graph.add(triple)
        return result
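
For orientation, below is a minimal usage sketch against the rdflib 2.4-era API (Graph, ConjunctiveGraph, URIRef). It only exercises behaviour visible in this file; the cluster methods additionally need check_subject/check_object in scope, as noted at the top of the module.

from rdflib import URIRef
from rdflib.Graph import Graph, ConjunctiveGraph
from rdflib.sparql.sparqlGraph import SPARQLGraph

# wrapping a plain Graph records NAMED_GRAPH and the graph's identifier
named = SPARQLGraph(Graph(identifier=URIRef("http://example.org/g")))
print named.graphKind == SPARQLGraph.NAMED_GRAPH      # True
print named.identifier                                # http://example.org/g

# wrapping a ConjunctiveGraph records SPARQL_DATASET and the identifier
# of the default context
dataset = SPARQLGraph(ConjunctiveGraph())
print dataset.graphKind == SPARQLGraph.SPARQL_DATASET # True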