[api] Make autocast API consistent, closes #248554

authorVincent Michel <vincent.michel@logilab.fr>
changeseta458f2793377
branchdefault
phasepublic
hiddenno
parent revision#e8f0139b1984 Added tag nazca-version-0.5.1, nazca-debian-version-0.5.1-1, nazca-centos-version-0.5.1-1 for changeset 4aedb4dd72e0
child revision#5689a4cc4915 [distance] Add safety belt on geographical distance units, closes #248555, #124c0e3b5098 [distance] Add safety belt on geographical distance units, closes #248555
files modified by this revision
test/test_dataio.py
utils/dataio.py
# HG changeset patch
# User Vincent Michel <vincent.michel@logilab.fr>
# Date 1400710321 0
# Wed May 21 22:12:01 2014 +0000
# Node ID a458f2793377fa866e46fa0c4c4d09e0f720b230
# Parent e8f0139b1984d4598e0301d362730c6897fdb849
[api] Make autocast API consistent, closes #248554

diff --git a/test/test_dataio.py b/test/test_dataio.py
@@ -107,11 +107,11 @@
1          self.assertEqual([[1, ('12', 19), u'apple', u'house'],
2                            [2, ('21.9', 19), u'stramberry', u'horse'],
3                            [3, ('23', 2.17), u'cherry', u'flower']], data)
4 
5          data = parsefile(path.join(TESTDIR, 'data', 'file2parse'),
6 -                         [0, (2, 3), 4, 1], delimiter=',', use_autocast=False)
7 +                         [0, (2, 3), 4, 1], delimiter=',', autocast_data=False)
8          self.assertEqual([['1', ('12', '19'), 'apple', 'house'],
9                            ['2', ('21.9', '19'), 'stramberry', 'horse'],
10                            ['3', ('23', '2.17'), 'cherry', 'flower']], data)
11 
12 
@@ -224,11 +224,11 @@
13                                   ' ?ville db-owl:populationTotal ?population .'
14                                   ' ?ville foaf:name ?name .'
15                                   ' ?ville db-prop:longitude ?long .'
16                                   ' ?ville db-prop:latitude ?lat .'
17                                   ' FILTER (?population > 1000)'
18 -                                 '} LIMIT 100', indexes=[0, 1, (2, 3)], autocaste_data=False)
19 +                                 '} LIMIT 100', indexes=[0, 1, (2, 3)], autocast_data=False)
20          self.assertEqual(len(alignset), 100)
21          self.assertFalse(isinstance(alignset[0][2][0], float))
22 
23      def test_rqlquery(self):
24          results = rqlquery('http://www.cubicweb.org',
diff --git a/utils/dataio.py b/utils/dataio.py
@@ -66,11 +66,11 @@
25      password = sourceinfo['password']
26      _, cnx = dbapi.in_memory_repo_cnx(config, login, password=password)
27      req = cnx.request()
28      return req
29 
30 -def rqlquery(host, rql, indexes=None, formatopt=None, _cache_cnx={}, **kwargs):
31 +def rqlquery(host, rql, indexes=None, formatopt=None, autocast_data=True, _cache_cnx={}, **kwargs):
32      """ Run the rql query on the given cubicweb host
33      Additional arguments can be passed to be properly substitued
34      in the execute() function for appid accces.
35      """
36      if host.startswith('http://'):
@@ -81,11 +81,11 @@
37          filehandle = urllib.urlopen('%(host)s/view?'
38                                      'rql=%(rql)s&vid=csvexport'
39                                      % {'rql': rql, 'host': host})
40          filehandle.readline()#Skip the first line
41          return parsefile(filehandle, delimiter=';', indexes=indexes,
42 -                         formatopt=formatopt);
43 +                         formatopt=formatopt, autocast_data=autocast_data);
44      else:
45          # By appid
46          if host in _cache_cnx:
47              cnx = _cache_cnx[host]
48          else:
@@ -118,23 +118,23 @@
49          if raise_on_error:
50              raise RuntimeError('Error in sparql query', err)
51          else:
52              return []
53 
54 -def sparqlquery(endpoint, query, indexes=None, autocaste_data=True, raise_on_error=False):
55 +def sparqlquery(endpoint, query, indexes=None, autocast_data=True, raise_on_error=False):
56      """ Run the sparql query on the given endpoint, and wrap the items in the
57      indexes form. If indexes is empty, keep raw output"""
58      results = []
59      rawresults = _sparqlexecute(endpoint, query, raise_on_error)
60      if not rawresults:
61          return results
62      labels = rawresults['head']['vars']
63      indexes = indexes or []
64 -    if autocaste_data:
65 +    if autocast_data:
66          transform = autocast
67      else:
68 -        def transform(*args): return args
69 +        def transform(x): return x
70      for raw in rawresults["results"]["bindings"]:
71          data = []
72          if not indexes:
73              data = [transform(raw[label]['value']) for label in labels]
74          else:
@@ -175,11 +175,11 @@
75  ###############################################################################
76  ### FILE FUNCTIONS ############################################################
77  ###############################################################################
78  def parsefile(filename, indexes=None, nbmax=None, delimiter='\t',
79                encoding='utf-8', field_size_limit=None,
80 -              use_autocast=True, formatopt=None):
81 +              autocast_data=True, formatopt=None):
82      """ Parse the file (read ``nbmax`` line at maximum if given). Each
83          line is splitted according ``delimiter`` and only ``indexes`` are kept
84 
85          eg : The file is :
86                  1, house, 12, 19, apple
@@ -216,11 +216,11 @@
87              yield [cell.strip() for cell in row]
88          csvfile.close()
89 
90 
91      # Autocast if asked
92 -    if use_autocast:
93 +    if autocast_data:
94          deffunc = lambda x: autocast(x, encoding)
95      else:
96          deffunc = lambda x: x
97      result = []
98      indexes = indexes or []