[processing] Allow test creation from history window

Matthias Kuhn 2016-02-03 16:42:18 +01:00
parent 8adc871eef
commit f0629ea5a9
3 changed files with 139 additions and 65 deletions
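For context on the change below: the processing history stores each algorithm run as a processing.runalg(...) call string, and the rewritten createTest() in TestTools.py works directly on that string. A minimal sketch of the tokenisation it performs; the algorithm id and file path here are made up for illustration:

# Hypothetical history entry text (algorithm id and path are illustrative only).
entry_text = "processing.runalg('qgis:examplealg','/data/testdata/polys.gml',None)"

# createTest() strips the 'processing.runalg(' prefix and the closing
# parenthesis, then splits the argument list on commas.
tokens = entry_text[len('processing.runalg('):-1].split(',')
cmdname = tokens[0][1:-1]   # -> 'qgis:examplealg'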

View File

@@ -88,7 +88,6 @@ class AlgorithmDialog(AlgorithmDialogBase):
def runAsBatch(self):
dlg = BatchAlgorithmDialog(self.alg)
dlg.show()
dlg.exec_()
def setParamValues(self):

View File

@@ -128,7 +128,6 @@ class HistoryDialog(BASE, WIDGET):
TestTools.createTest(item.entry.text)
def showPopupMenu(self, point):
return
item = self.tree.currentItem()
if isinstance(item, TreeLogEntryItem):
if item.isAlg:

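The hunk above routes a selected history entry into TestTools.createTest(). Below is a hedged sketch of how such a context-menu hook inside HistoryDialog could look, assuming PyQt4 and the TreeLogEntryItem type shown above; the actual wiring added by this commit may differ:

# Hedged sketch only, not the exact code added by this commit.
from PyQt4.QtGui import QMenu, QAction


def showPopupMenu(self, point):
    item = self.tree.currentItem()
    if isinstance(item, TreeLogEntryItem) and item.isAlg:
        popupmenu = QMenu()
        createTestAction = QAction(self.tr('Create test'), popupmenu)
        # Feed the stored runalg(...) string of the selected entry
        # into the test generator.
        createTestAction.triggered.connect(
            lambda: TestTools.createTest(item.entry.text))
        popupmenu.addAction(createTestAction)
        popupmenu.exec_(self.tree.mapToGlobal(point))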
View File

@@ -26,6 +26,9 @@ __copyright__ = '(C) 2013, Victor Olaya'
__revision__ = '$Format:%H$'
import os
import yaml
import hashlib
from osgeo import gdal
from osgeo.gdalconst import GA_ReadOnly
@@ -33,81 +36,153 @@ from PyQt4.QtCore import QCoreApplication, QMetaObject
from PyQt4.QtGui import QMessageBox, QDialog, QVBoxLayout, QTextEdit
from processing.core.Processing import Processing
from processing.core.outputs import OutputNumber
from processing.core.outputs import OutputString
from processing.core.outputs import OutputRaster
from processing.core.outputs import OutputVector
from processing.tools import vector, dataobjects
from processing.core.outputs import (
OutputNumber,
OutputString,
OutputRaster,
OutputVector
)
from processing.core.parameters import (
ParameterRaster,
ParameterVector,
ParameterMultipleInput
)
def extractSchemaPath(filepath):
"""
Trys to find where the file is relative to the QGIS source code directory.
If it is already placed in the processing or QGIS testdata directory it will
return an appropriate schema and relative filepath
Args:
filepath: The path of the file to examine
Returns:
A tuple (schema, relative_file_path) where the schema is 'qgs' or 'proc'
if we can assume that the file is in this testdata directory.
"""
parts = []
schema = None
localpath = ''
path = filepath
part = True
while part:
(path, part) = os.path.split(path)
if part == 'testdata' and not localpath:
localparts = parts
localparts.reverse()
localpath = os.path.join(*localparts)
parts.append(part)
parts.reverse()
try:
testsindex = parts.index('tests')
except ValueError:
return '', filepath
if parts[testsindex - 1] == 'processing':
schema = 'proc'
return schema, localpath
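A short usage sketch of extractSchemaPath(), with a hypothetical absolute path, to illustrate the intended return values:

# Illustrative only; the absolute path below is hypothetical.
schema, relpath = extractSchemaPath(
    '/src/QGIS/python/plugins/processing/tests/testdata/polys.gml')
# schema  == 'proc'       (the file sits under processing/tests/testdata)
# relpath == 'polys.gml'  (path relative to that testdata directory)
# A path without a 'tests' component yields ('', original_filepath) instead.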
def createTest(text):
s = ''
definition = {}
tokens = text[len('processing.runalg('):-1].split(',')
cmdname = (tokens[0])[1:-1]
methodname = 'test_' + cmdname.replace(':', '')
s += 'def ' + methodname + '(self):\n'
alg = Processing.getAlgorithm(cmdname)
execcommand = 'processing.runalg('
definition['name'] = 'Test ({})'.format(cmdname)
definition['algorithm'] = cmdname
params = []
results = {}
i = 0
for token in tokens:
if i < alg.getVisibleParametersCount() + 1:
if os.path.exists(token[1:-1]):
token = os.path.basename(token[1:-1])[:-4] + '()'
execcommand += token + ','
else:
execcommand += 'None,'
for param in alg.parameters:
if param.hidden:
continue
i += 1
s += '\toutputs=' + execcommand[:-1] + ')\n'
token = tokens[i]
if isinstance(param, ParameterVector):
filename = token[1:-1]
schema, filepath = extractSchemaPath(filename)
p = {
'type': 'vector',
'name': filepath
}
if not schema:
p['location'] = '[The source data is not in the testdata directory. Please use data in the processing/tests/testdata folder.]'
params.append(p)
elif isinstance(param, ParameterRaster):
filename = token[1:-1]
schema, filepath = extractSchemaPath(filename)
p = {
'type': 'raster',
'name': filepath
}
if not schema:
p['location'] = '[The source data is not in the testdata directory. Please use data in the processing/tests/testdata folder.]'
params.append(p)
elif isinstance(param, ParameterMultipleInput):
multiparams = token[1:-1].split(';')
newparam = []
for mp in multiparams:
schema, filepath = extractSchemaPath(mp)
newparam.append({
'type': 'vector',
'name': filepath
})
p = {
'type': 'multi',
'params': newparam
}
if not schema:
p['location'] = '[The source data is not in the testdata directory. Please use data in the processing/tests/testdata folder.]'
params.append(p)
else:
params.append(token)
definition['params'] = params
for i, out in enumerate(alg.outputs):
token = tokens[i - len(alg.outputs)]
i = -1 * len(alg.outputs)
for out in alg.outputs:
filename = (tokens[i])[1:-1]
if tokens[i] == unicode(None):
QMessageBox.critical(None, tr('Error'),
tr('Cannot create unit test for that algorithm execution. The '
'output cannot be a temporary file'))
return
s += "\toutput=outputs['" + out.name + "']\n"
if isinstance(out, (OutputNumber, OutputString)):
s += 'self.assertTrue(' + unicode(out) + ', output.value)\n'
if isinstance(out, OutputRaster):
results[out.name] = unicode(out)
elif isinstance(out, OutputRaster):
filename = token[1:-1]
dataset = gdal.Open(filename, GA_ReadOnly)
strhash = hash(unicode(dataset.ReadAsArray(0).tolist()))
s += '\tself.assertTrue(os.path.isfile(output))\n'
s += '\tdataset=gdal.Open(output, GA_ReadOnly)\n'
s += '\tstrhash=hash(unicode(dataset.ReadAsArray(0).tolist()))\n'
s += '\tself.assertEqual(strhash,' + unicode(strhash) + ')\n'
if isinstance(out, OutputVector):
layer = dataobjects.getObject(filename)
fields = layer.pendingFields()
s += '\tlayer=dataobjects.getObjectFromUri(output, True)\n'
s += '\tfields=layer.pendingFields()\n'
s += '\texpectednames=[' + ','.join(["'" + unicode(f.name()) + "'"
for f in fields]) + ']\n'
s += '\texpectedtypes=[' + ','.join(["'" + unicode(f.typeName()) + "'"
for f in fields]) + ']\n'
s += '\tnames=[unicode(f.name()) for f in fields]\n'
s += '\ttypes=[unicode(f.typeName()) for f in fields]\n'
s += '\tself.assertEqual(expectednames, names)\n'
s += '\tself.assertEqual(expectedtypes, types)\n'
features = vector.features(layer)
numfeat = len(features)
s += '\tfeatures=processing.features(layer)\n'
s += '\tself.assertEqual(' + unicode(numfeat) + ', len(features))\n'
if numfeat > 0:
feature = features.next()
attrs = feature.attributes()
s += '\tfeature=features.next()\n'
s += '\tattrs=feature.attributes()\n'
s += '\texpectedvalues=[' + ','.join(['"' + unicode(attr) + '"'
for attr in attrs]) + ']\n'
s += '\tvalues=[unicode(attr) for attr in attrs]\n'
s += '\tself.assertEqual(expectedvalues, values)\n'
s += "\twkt='" + unicode(feature.geometry().exportToWkt()) + "'\n"
s += '\tself.assertEqual(wkt, \
unicode(feature.geometry().exportToWkt()))'
strhash = hashlib.sha224(dataset.ReadAsArray(0).data).hexdigest()
dlg = ShowTestDialog(s)
results[out.name] = {
'type': 'rasterhash',
'hash': strhash
}
elif isinstance(out, OutputVector):
filename = token[1:-1]
schema, filepath = extractSchemaPath(filename)
results[out.name] = {
'type': 'vector',
'name': filepath
}
if not schema:
results[out.name]['location'] = '[The expected result data is not in the testdata directory. Please write it to processing/tests/testdata/expected. Prefer gml files.]'
definition['results'] = results
dlg = ShowTestDialog(yaml.dump([definition], default_flow_style=False))
dlg.exec_()
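To make the generated output concrete, here is a rough, hand-written example of the definition dictionary that createTest() assembles before dumping it to YAML; the algorithm id, file names and the 'OUTPUT' key are hypothetical:

# Hypothetical example of the structure assembled above; algorithm id,
# file names and the 'OUTPUT' key are made up for illustration.
definition = {
    'name': 'Test (qgis:examplealg)',
    'algorithm': 'qgis:examplealg',
    'params': [
        {'type': 'vector', 'name': 'polys.gml'}
    ],
    'results': {
        'OUTPUT': {'type': 'vector', 'name': 'expected/examplealg.gml'}
    }
}
# yaml.dump([definition], default_flow_style=False) produces the text
# shown in the ShowTestDialog below.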
@@ -124,6 +199,7 @@ class ShowTestDialog(QDialog):
self.setWindowTitle(self.tr('Unit test'))
layout = QVBoxLayout()
self.text = QTextEdit()
self.text.setFontFamily("monospace")
self.text.setEnabled(True)
self.text.setText(s)
layout.addWidget(self.text)