--- /dev/null
+# import re
+# import random
+# import asyncio
+# import functools
+# import time
+# import os
+# import sys
+# from testing import Client
+# from testing import default_test_setup
+# from testing import gen_data
+# from testing import gen_points
+# from testing import gen_series
+# from testing import InsertError
+# from testing import PoolError
+# from testing import QueryError
+# from testing import run_test
+# from testing import Series
+# from testing import Server
+# from testing import ServerError
+# from testing import SiriDB
+# from testing import TestBase
+# from testing import UserAuthError
+# from querybouwer.querybouwer import Querybouwer
+# from querybouwer.k_map import k_map
+# #from querybouwer.validate import SiriRipOff, compare_result
+# from cmath import isfinite
+# from math import nan
+
+# from testing.constants import SIRIDBC
+# print(SIRIDBC)
+# sys.path.append('../grammar/')#os.path.join(SIRIDBC, '/grammar'))
+# from pygrammar.grammar import SiriGrammar
+
+
+# M, N = 20, 70
+# #DATA = {str(a).zfill(6): [[b, random.randint(0, 20)] for b in range(N)] for a in range(M)}
+# DATA = {str(a).zfill(6): [[b*N+a, random.randint(0, 20)] for b in range(N)] for a in range(M)}
+# #DATA = {str(a).zfill(6): [[b, nan if b > 15 else float(random.randint(0, 20))] for b in range(N)] for a in range(M)}
+
+# #client_validate = SiriRipOff()
+# #client_validate.groups = {'GROUP': re.compile('.*')}
+# #client_validate.update(DATA)
+
+
+# class TestGrammar(TestBase):
+# title = 'Test select'
+
+# async def test_main(self):
+# qb = Querybouwer(SiriGrammar, {'regex_map': k_map,
+# 'max_list_n_map': {
+# 'series_match': 1,
+# 'aggregate_functions': 1,
+# 'select_aggregate': 1
+# },
+# #'default_list_n': 2,
+# 'replace_map': {
+# 'k_prefix': '',
+# 'k_suffix': '',
+# 'k_where': '',
+# 'after_expr': '',
+# 'before_expr': '',
+# 'between_expr': '',
+# 'k_merge': '',
+# 'r_singleq_str': '',
+
+# #'f_median': '',
+# #'f_median_low': '',
+# #'f_median_high': '',
+# 'k_now': '',
+# 'r_float': '',
+# 'r_time_str': '',
+# #'r_grave_str': '',
+# #'f_filter': '',
+# }
+# })
+
+# for q in qb.generate_queries('select_stmt'):
+# #if '~' in q: continue
+# #if 'between now' in q: continue
+# #if "000000" in q or '1970' in q or '*' in q or '==' in q or '>=' in q or '<' in q: continue
+
+# #if 'filter' in q and 'derivative' in q.split('filter', 1)[0]: continue
+# q = ' suffix "a", '.join(q.split(' , ', 1))
+# #a, b = q.split(' , ', 1)
+# #q = a + ' suffix "b",'+b
+
+# #q = 'select * from /.*/ '+q
+# try:
+# res0 = await self.client0.query(q)
+# except:
+# pass
+# #res1 = client_validate.query(q)
+# #self.assertFalse(compare_result(res0, res1))
+
+# async def test_where(self):
+# qb = Querybouwer(SiriGrammar, {'regex_map': k_map,
+# 'max_list_n_map': {
+# 'series_match': 1,
+# 'aggregate_functions': 1},
+# 'replace_map': {
+# 'k_prefix': '',
+# 'k_suffix': 'suffix',
+# #'k_where': '',
+# 'after_expr': '',
+# 'before_expr': '',
+# 'between_expr': '',
+# 'k_merge': '',
+# 'r_singleq_str': '',
+# }
+# })
+# for q in qb.generate_queries('select_stmt'):
+# if '~' in q: continue
+# if 'between now' in q: continue
+# print(q)
+# res0 = await self.client0.query(q)
+# #res1 = client_validate.query(q)
+# #self.assertFalse(compare_result(res0, res1))
+
+# async def test_between(self):
+# qb = Querybouwer(SiriGrammar, {'regex_map': k_map,
+# 'max_list_n_map': {
+# 'series_match': 1,
+# 'aggregate_functions': 1},
+# 'replace_map': {
+# 'k_prefix': '',
+# 'k_suffix': '',
+# 'k_where': '',
+# 'after_expr': '',
+# 'before_expr': '',
+# #'between_expr': '',
+# 'k_merge': '',
+# 'r_singleq_str': '',
+# }
+# })
+# for q in qb.generate_queries('select_stmt'):
+# if '~' in q: continue
+# if 'between now' in q: continue
+# print(q)
+# res = await self.client0.query(q)
+# #self.assertFalse(validate_query_result(q, res))
+
+# async def test_merge(self):
+# qb = Querybouwer(SiriGrammar, {'regex_map': k_map,
+# 'max_list_n_map': {
+# 'series_match': 1,
+# 'aggregate_functions': 1},
+# 'replace_map': {
+# 'k_prefix': '',
+# 'k_suffix': '',
+# 'k_where': '',
+# 'after_expr': '',
+# 'before_expr': '',
+# 'between_expr': '',
+# #'k_merge': '',
+# 'r_singleq_str': '',
+# }
+# })
+# for q in qb.generate_queries('select_stmt'):
+# if '~' in q: continue
+# if 'between now' in q: continue
+# print(q)
+# res = await self.client0.query(q)
+
+
+# @default_test_setup(1)
+# async def run(self):
+# await self.client0.connect()
+
+# for k, v in sorted(DATA.items()):
+# await self.client0.insert({k: v})
+# await self.client0.query('create group `GROUP` for /.*/')
+# #time.sleep(2)
+
+# await self.test_main()
+# #await self.test_where()
+# #await self.test_between()
+# #await self.test_merge()
+
+# self.client0.close()
+
+# return False
+
+
+# if __name__ == '__main__':
+# SiriDB.LOG_LEVEL = 'CRITICAL'
+# Server.HOLD_TERM = True
+# Server.MEM_CHECK = True
+# Server.BUILDTYPE = 'Debug'
+#     run_test(TestGrammar())
+# #run_test(TestSelect())
--- /dev/null
+# import sys
+# from collections import defaultdict
+# from statistics import mean, median, median_low, median_high, variance, pvariance
+# from datetime import datetime
+# from numpy import nan, inf, isnan, isfinite
+# import time
+# import re
+# sys.path.append('../grammar')
+# from pygrammar.grammar import SiriGrammar # nopep8
+# # import decimal
+# # getcontext().prec = 12
+# PRECISION = 9
+
+# expr_map = {
+# '==': lambda a, b: a==b,
+# '!=': lambda a, b: a!=b,
+# '<=': lambda a, b: a<=b,
+# '>=': lambda a, b: a>=b,
+# '<': lambda a, b: a<b,
+# '>': lambda a, b: a>b,
+# '!~': lambda a, b: True,
+# '~': lambda a, b: True,
+# }
+# prop_map = {
+# 'length': lambda k, v: len(v),
+# 'pool': lambda k, v: False,
+# 'name': lambda k, v: k,
+# 'start': lambda k, v: v[0][0],
+# 'end': lambda k, v: v[-1][0],
+# 'type': lambda k, v: 'integer' if type(v[0][1])==int else 'float',
+# }
+# setop_map = {
+# 'k_union': lambda a, b: a | b,
+# 'c_difference': lambda a, b: a - b,
+# 'k_symmetric_difference': lambda a, b: a ^ b,
+# 'k_intersection': lambda a, b: a & b,
+# }
+
+# seconds_map = {'s': 1, 'm': 60, 'h': 3600, 'd': 86400, 'w': 604800}
+
+
+# def difference(pts, ts):
+# if ts:
+#         return [[t, 0 if len(v)==1 else v[-1]-v[0]] for t, v in sorted(binfun(pts, ts[0]))]# IS THIS CORRECT? (nan-nan) 0 if len(v)==1 else
+# else:
+# return [[t, v1-v0] for (_, v0), (t, v1) in zip(pts[:-1], pts[1:])]
+
+
+# def derivative(pts, ts):
+# ts = (ts[0] if ts else 1)
+# return [[t1, inf if (t0==t1 and (v1-v0)>0) else \
+# -inf if (t0==t1 and (v1-v0)<0) else \
+# nan if t0==t1 else \
+# ts*(v1-v0)/(t1-t0)#round(ts*(v1-v0)/(t1-t0), PRECISION)
+# ] for (t0, v0), (t1, v1) in zip(pts[:-1], pts[1:])]
+
+
+# def pvariance_(vs):
+# nans = [a for a in vs if not isfinite(a)]
+# return nan if nans else 0 if len(vs) == 1 else pvariance(vs)
+
+
+# def binfun(pts, size):
+# bins = defaultdict(list)
+# for t, v in pts:
+# bins[(t-1)//size*size+size].append(v)
+# return bins.items()
+
+
+# def binfunmerge(series, size):
+# bins = defaultdict(list)
+# for _, pts in sorted(series.items(), reverse=True):
+# for t, v in pts:
+# bins[(t-1)//size*size+size].append(v)
+# return bins.items()
+
+
+# def mergefun(series):
+# return sorted([tv for _, pts in sorted(series.items(), reverse=True) for tv in pts], key=lambda a: a[0])
+
+# funmap = {
+# 'f_points': lambda pts, ts: pts,
+# 'f_sum': lambda pts, ts: sorted([t, sum(v)] for t, v in binfun(pts, ts[0])),
+# 'f_mean': lambda pts, ts: sorted([t, mean(v)] for t, v in binfun(pts, ts[0])),
+# 'f_median': lambda pts, ts: sorted([t, median(v)] for t, v in binfun(pts, ts[0])),
+# 'f_median_low': lambda pts, ts: sorted([t, median_low(v)] for t, v in binfun(pts, ts[0])),
+# 'f_median_high': lambda pts, ts: sorted([t, median_high(v)] for t, v in binfun(pts, ts[0])),
+# 'f_min': lambda pts, ts: sorted([t, min(v)] for t, v in binfun(pts, ts[0])),
+# 'f_max': lambda pts, ts: sorted([t, max(v)] for t, v in binfun(pts, ts[0])),
+# 'f_count': lambda pts, ts: sorted([t, len(v)] for t, v in binfun(pts, ts[0])),
+# 'f_variance': lambda pts, ts: sorted([t, 0 if len(v) ==1 else variance(v)] for t, v in binfun(pts, ts[0])),
+# 'f_pvariance': lambda pts, ts: sorted([t, pvariance_(v)] for t, v in binfun(pts, ts[0])),
+# 'f_difference': difference,
+# 'f_derivative': derivative,
+# 'f_filter': lambda pts, ts: [tv for tv in pts if ts[0](tv[1], ts[1])],#round(tv[1], 6)
+# }
+
+# funmapmerge = {
+# 'f_points': lambda series, ts: mergefun(series),
+# 'f_sum': lambda series, ts: sorted([t, sum(v)] for t, v in binfunmerge(series, ts[0])),
+# 'f_mean': lambda series, ts: sorted([t, mean(v)] for t, v in binfunmerge(series, ts[0])),
+# 'f_median': lambda series, ts: sorted([t, median(v)] for t, v in binfunmerge(series, ts[0])),
+# 'f_median_low': lambda series, ts: sorted([t, median_low(v)] for t, v in binfunmerge(series, ts[0])),
+# 'f_median_high': lambda series, ts: sorted([t, median_high(v)] for t, v in binfunmerge(series, ts[0])),
+# 'f_min': lambda series, ts: sorted([t, min(v)] for t, v in binfunmerge(series, ts[0])),
+# 'f_max': lambda series, ts: sorted([t, max(v)] for t, v in binfunmerge(series, ts[0])),
+# 'f_count': lambda series, ts: sorted([t, len(v)] for t, v in binfunmerge(series, ts[0])),
+# 'f_variance': lambda series, ts: sorted([t, round(variance(v), PRECISION) if len(v) > 1 else 0] for t, v in binfunmerge(series, ts[0])),
+# 'f_pvariance': lambda series, ts: sorted([t, round(pvariance(v), PRECISION) if len(v) > 1 else 0] for t, v in binfunmerge(series, ts[0])),
+# 'f_difference': lambda series, ts: difference(mergefun(series), ts),
+# 'f_derivative': lambda series, ts: derivative(mergefun(series), ts),
+# 'f_filter': lambda series, ts: [tv for tv in mergefun(series) if ts[0](tv[1], ts[1])],
+# }
+
+
+# class SiriRipOff(dict):
+# groups = {}
+
+# def _time_expr(self, e):
+# for e1 in e.children:
+# if e1.children[0].element.name == 'r_integer':
+# return int(e1.string)
+# elif e1.children[0].element.name == 'r_time_str':
+# return int(e1.string[:-1])*seconds_map[e1.string[-1:]]
+# elif e1.children[0].element.name == 'k_now':
+# return int(time.time())
+# else:
+# alist = e1.string[1:-1].split()
+# Y, M, D = map(int, alist[0].split('-'))
+# if len(alist) == 2:
+# h, m, s = map(int, (alist[1]+':0:0').split(':')[:3])
+# dt = datetime(Y, M, D, h, m, s)
+# else:
+# dt = datetime(Y, M, D)
+# return int(time.mktime(dt.timetuple()))
+
+# def _aggrfun(self, e):
+# ename = getattr(e.element, 'name', None)
+# texprs = []
+# if ename == 'f_filter':
+# oname = e.children[2].children[0].element.name
+# op = expr_map[e.children[2].string] if oname == 'str_operator' else expr_map['==']
+# for e1 in e.children[-2].children:
+# if getattr(e1.element, 'name', None) == 'r_integer':
+# val = int(e1.string)
+# elif getattr(e1.element, 'name', None) == 'r_float':
+# val = float(e1.string)
+# else:
+# assert 0
+# texprs = [op, val]
+# else:
+# for e1 in e.children:
+# if getattr(e1.element, 'name', None) == 'time_expr':
+# texprs.append(self._time_expr(e1))
+# else:
+# for e2 in e1.children:
+# if getattr(e2.element, 'name', None) == 'time_expr':
+# texprs.append(self._time_expr(e2))
+
+# return ename, texprs
+
+# def _from(self, e):
+# seriessets = []
+# operators = []
+# for e1 in [a.children[0] for a in e.children]:
+# ename = getattr(e1.element, 'name', None)
+# if ename == 'series_name':
+# seriessets.append({e1.string[1:-1]} if e1.string[1:-1] in self else set())
+# elif ename == 'series_re':
+# r = re.compile(e1.string[1:-1])
+# seriessets.append({name for name in self if r.match(name)})
+# elif ename == 'group_match':
+# r = self.groups[e1.string[1:-1]]
+# seriessets.append({name for name in self if r.match(name)})
+# else:
+# operators.append(setop_map[ename])
+
+# _series = seriessets[0]
+# for s, o in zip(seriessets[1:], operators):
+# _series = o(_series, s)
+# return _series
+
+# def _where(self, e):
+# wfun = []
+# for e1 in e.children[0].children[1].children[0].children:
+# prop, exp, val = e1.children
+# vtyp = getattr(val.element, 'name', None)
+# if vtyp == 'string':
+# val = val.string[1:-1]
+# elif vtyp == 'int_expr':
+# val = int(val.string)
+# elif vtyp == 'time_expr':
+# val = self._time_expr(val)
+# else:
+# val = val.string
+
+# wfun.append(lambda k, v: expr_map[exp.string](prop_map[prop.string](k, v), val))
+
+# return lambda k, v: sum(fun(k, v) for fun in wfun)==len(wfun)
+
+# def _aggr(self, e):
+# aggr = []
+# for i, e1 in enumerate(e.children):
+# if i%2==1: continue
+# _prefix = ''
+# _suffix = ''
+# _aggrfuns = []
+# for i, e2 in enumerate(e1.children[0].children):
+# if i%2==1: continue
+# fun, ts = self._aggrfun(e2.children[0])
+# _aggrfuns.append([fun, ts])
+
+# for e2 in e1.children[1:]:
+# if e2.children[0].element.name == 'prefix_expr':
+# _prefix = e2.children[0].children[1].string[1:-1]
+#                 elif e2.children[0].element.name == 'suffix_expr':
+# _suffix = e2.children[0].children[1].string[1:-1]
+# aggr.append([_aggrfuns, _prefix, _suffix])
+# return aggr
+
+# def _merge(self, e):
+# mergeas = e.children[0].children[2].string[1:-1]
+# _aggrfuns = []
+# if e.children[0].children[-1].children[0].children:
+# for i, e1 in enumerate(e.children[0].children[-1].children[0].children[1].children):
+# if i%2==1: continue
+# fun, ts = self._aggrfun(e1.children[0])
+# _aggrfuns.append([fun, ts])
+# return mergeas, _aggrfuns or [['f_points', None]]
+
+
+# def select_stmt(self, e):
+# aggr = self._aggr(e.children[1])
+# names = self._from(e.children[3])
+# wfun, merge, after, before = None, None, None, None
+# for e1 in e.children[4:]:
+# lname = getattr(e1.children[0].children[0].element, 'name', None)
+# if lname == 'k_where':
+# wfun = self._where(e1)
+# elif lname == 'k_merge':
+# merge = self._merge(e1)
+# elif lname == 'after_expr':
+# after = self._time_expr(e1.children[0].children[0].children[1])
+# elif lname == 'before_expr':
+# before = self._time_expr(e1.children[0].children[0].children[1])
+# elif lname == 'between_expr':
+# after = self._time_expr(e1.children[0].children[0].children[1])
+# before = self._time_expr(e1.children[0].children[0].children[3])
+
+# series = {}
+# for name in names:
+# pts = self[name]
+# if wfun and not wfun(name, pts): continue
+# if after is not None and before:
+# pts = [[t, v] for t, v in pts if after <= t < before]
+# elif after is not None:
+# pts = [[t, v] for t, v in pts if after <= t]
+# elif before is not None:
+# pts = [[t, v] for t, v in pts if t < before]
+
+# for aggrs, prefix, suffix in aggr:
+# for funname, ts in aggrs:
+# pts = funmap[funname](pts, ts)
+# series[prefix+name+suffix] = pts
+
+# lname = getattr(e.children[-1].children[0].element, 'name', None)
+# if merge:
+# mergeas, aggrs = merge
+# for funname, ts in aggrs[:1]:
+# pts = funmapmerge[funname](series, ts)
+# for funname, ts in aggrs[1:]:
+# pts = funmap[funname](pts, ts)
+
+# return {mergeas: pts}
+
+# return series
+
+# def query(self, q):
+#         res = SiriGrammar().parse(q)
+# for e in res.tree.children[0].children[0].children[0].children:
+# ename = getattr(e.element, 'name', None)
+# return getattr(self, ename)(e)
+
+
+# def compare_result(res0, res1):
+# if res0.keys()!=res1.keys():
+# return True
+
+# for k, v in res0.items():
+# v_val = res1[k]
+# if v == v_val: continue
+# n_ok = sum(v0==v1 or round(v0-v1, 3)==0 or (isnan(v0) and isnan(v1)) for (t0, v0), (t1, v1) in zip(v, v_val))
+# if n_ok == len(v) == len(v_val): continue
+# print(n_ok)
+# print('s', len(v), v)
+# print('k', len(v_val), v_val)
+# print(len(v), len(v_val))
+# return True
+# return False
\ No newline at end of file
--- /dev/null
+max_repeat_n_map = {}
+replace_map = {'r_singleq_str': ''}
+max_list_n_map = {'series_match': 2, 'aggregate_functions': 2}
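+# k_map supplies concrete substitution values for regex leaves in the grammar.
+# Each top-level key is a regex element name (r_doubleq_str, r_integer, ...);
+# its value maps an enclosing element name (usually a keyword such as k_port)
+# to the literal that QueryGenerator inserts when it expands that regex in
+# that context. Several entries are placeholders that are overwritten with
+# real server values at runtime (see update_k_map_show in the grammar test).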
+k_map = {
+ 'r_doubleq_str': {
+ 'k_as': '"MERGED"',
+ 'k_suffix': '"SUFFIX"',
+ 'k_prefix': '"PREFIX"',
+ 'series_name': '"000000"',
+ 'k_filter': 10, # '"10"',
+ 'uuid': '"koos-VirtualBox:9010"',
+ 'k_name': '"000000"',
+ 'k_user': '"USER"',
+ 'k_password': '"PASSWORD"',
+ 'k_status': '"running"',
+ 'k_expression': '"/.*/"',
+ 'k_address': '"localhost"',
+ 'k_buffer_path': '"BUFFER_PATH"',
+ 'k_dbpath': '"DBPATH"',
+ 'k_uuid': '"UUID"',
+ 'k_version': '"VERSION"',
+ 'k_reindex_progress': '"REINDEX_PROGRESS"',
+ 'k_sync_progress': '"SYNC_PROGRESS"',
+ 'k_timezone': '"NAIVE"',
+ 'k_ip_support': '"ALL"',
+ 'k_libuv': '"1.8.0"',
+ 'k_server': '"SERVER"',
+
+ 'aggregate_functions': '"1970-1-1 1:00:10"',
+ 'k_start': '"1970-1-1 1:00:00"',
+ 'k_after': '"1970-1-1 1:00:00"',
+ 'k_between': '"1970-1-1 1:00:00"',
+ 'k_before': '"1970-1-1 1:01:00"',
+ 'k_and': '"1970-1-1 1:01:00"',
+ 'k_end': '"1970-1-1 1:01:00"',
+ },
+ 'r_integer': {
+ 'k_series': 0, # GROUPS
+ 'k_active_handles': 0, # SERVERS
+ 'k_buffer_size': 0, # SERVERS
+ 'k_port': 9000, # SERVERS
+ 'k_startup_time': 0, # SERVERS
+ 'k_max_open_files': 0, # SERVERS
+ 'k_mem_usage': 0, # SERVERS
+ 'k_open_files': 0, # SERVERS
+ 'k_received_points': 0, # SERVERS
+ 'k_uptime': 0, # SERVERS,
+ 'k_servers': 0, # POOLS
+ 'k_limit': 10,
+ 'k_sid': 0,
+ 'k_pool': 0,
+ 'k_filter': 10,
+ 'k_size': 10,
+ 'k_length': 10,
+ 'aggregate_functions': 10,
+ 'k_start': 0,
+ 'k_after': 0,
+ 'k_between': 0,
+ 'k_before': 60,
+ 'k_and': 60,
+ 'k_end': 60,
+ },
+ 'r_float': {
+ 'k_filter': 10.0,
+ 'k_drop_threshold': 0.99},
+ 'r_time_str': {
+ 'aggregate_functions': '10s',
+ 'k_start': '0d',
+ 'k_after': '0d',
+ 'k_between': '0d',
+ 'k_before': '1m',
+ 'k_and': '1m',
+ 'k_end': '1m'},
+ 'r_uuid_str': {
+ 'r_uuid_str': '"UUID"'},
+ 'r_uinteger': {
+ },
+ 'r_grave_str': {'r_grave_str': '`000000`'},
+ 'r_regex': {
+ 'r_regex': '/.*/'
+ },
+}
--- /dev/null
+import sys
+
+
+class QueryGenerator(list):
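+    """Generate example query strings by expanding a grammar definition.
+
+    The expansion walks the grammar element tree depth-first, starting at a
+    named element (default 'START'), and yields a query string for every
+    combination of choices encountered along the way. The ``maps`` argument
+    tunes the expansion:
+
+    * ``replace_map``   -- elements named here are replaced by the given
+                           string (typically '' to suppress them);
+    * ``regex_map``     -- concrete values for regex leaves, looked up by the
+                           names of the enclosing elements (see k_map);
+    * ``max_list_n_map``, ``max_repeat_n_map``, ``default_list_n``
+                        -- bound how many items List/Repeat elements expand to.
+
+    The instance itself (a ``list``) serves as a stack of element names during
+    expansion so that ``_on_Regex`` can inspect the enclosing context.
+    """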
+
+ def __init__(self, grammar, maps={}):
+ self.grammar = grammar
+ self.ignores = maps.get('replace_map', {})
+ self.max_repeat_n_map = maps.get('max_repeat_n_map', {})
+ self.max_list_n_map = maps.get('max_list_n_map', {})
+ self.default_list_n = maps.get('default_list_n', 1)
+ self.regex_map = maps.get('regex_map', {})
+
+ def generate_queries(self, ename='START'):
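+        """Yield example queries for the grammar element named *ename*."""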
+ try:
+ ele = getattr(self.grammar, ename)
+ except AttributeError:
+            print('{} element not found in grammar tree'.format(ename))
+ else:
+ for q in self._addq([ele]):
+ yield ' '.join(map(str, q)).strip()
+
+ def _addq(self, q):
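+        """Expand the first unexpanded element in the partial query *q*.
+
+        Plain strings and numbers are kept as-is; the first grammar element
+        found is handed to the matching _on_<ElementClass> handler and every
+        resulting variant is expanded recursively. A fully expanded query
+        (no grammar elements left) is yielded unchanged.
+        """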
+ for i, e in enumerate(q):
+ if isinstance(e, str) or isinstance(e, int) or \
+ isinstance(e, float):
+ continue
+
+ ename = getattr(e, 'name', None)
+ iv = self.ignores.get(ename)
+ if iv is not None:
+ q[i] = iv
+ break
+
+ self.append(ename)
+ for q1 in getattr(self, '_on_'+e.__class__.__name__)(q, i):
+ for q2 in self._addq(q1):
+ yield q2
+ self.pop()
+ q[i] = e
+ break
+ else:
+ yield q
+
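+    # Each _on_<ElementClass> handler below rewrites q[i] into one possible
+    # expansion of that element type and yields the adjusted query list;
+    # _addq() keeps recursing until only plain strings and numbers remain.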
+ def _on_Keyword(self, q, i):
+ q[i] = q[i]._keyword
+ yield q
+
+ def _on_Regex(self, q, i):
+ re_map = self.regex_map[self[-1]]
+ for ename in self[::-1]:
+ val = re_map.get(ename)
+ if val is not None:
+ q[i] = val
+ break
+ else:
+            print('no value found for:')
+ print(self[-1])
+ print(q[:i+1])
+ print(i, len(self), self)
+ print(self.regex_map)
+ assert 0
+
+ yield q
+
+ def _on_Token(self, q, i):
+ q[i] = q[i]._token
+ yield q
+
+ def _on_Tokens(self, q, i):
+ for e1 in q[i]._tokens:
+ q[i] = e1
+ yield q
+
+ def _on_Sequence(self, q, i):
+ q = q[:i]+q[i]._elements+q[i+1:]
+ yield q
+
+ def _on_List(self, q, i):
+ if q[i]._min == 0:
+ q0 = q[:i]+q[i+1:]
+ yield q0
+
+ if q[i]._max == 1:
+ q0 = q[:i]+list(q[i]._elements)[:1]+q[i+1:]
+ yield q0
+ else:
+ name = getattr(q[i], 'name', getattr(q[i]._element, 'name', None))
+ n = self.max_list_n_map.get(name, self.default_list_n)
+ # print(n, getattr(q[i], 'name', None))
+ if q[i]._max:
+ n = min(n, q[i]._max)
+
+ eles = list(q[i]._elements)[:1] + \
+ [q[i]._delimiter, list(q[i]._elements)[0]]*(n-1)
+ # if getattr(q[i]._delimiter, '_token', None) == ',':
+ # eles = list(q[i]._elements)[:1] + \
+ # [q[i]._delimiter,
+ # list(q[i]._elements)[0]]*(self.max_list_n_simple-1)
+ # else:
+ # eles = list(q[i]._elements)[:1]+[q[i]._delimiter,
+ # list(q[i]._elements)[0]]*(self.max_list_n-1)
+ q0 = q[:i]+eles+q[i+1:]
+ yield q0
+
+ def _on_Repeat(self, q, i):
+ if q[i]._min == 0:
+ q0 = q[:i]+q[i+1:]
+ yield q0
+
+ if q[i]._max == 1:
+ q0 = q[:i]+list(q[i]._elements)[:1]+q[i+1:]
+ yield q0
+ else:
+ n = self.max_repeat_n_map.get(getattr(q[i], 'name', None), 1)
+ eles = list(q[i]._elements)[:1]*n
+ q0 = q[:i]+eles+q[i+1:]
+ yield q0
+
+ def _on_Optional(self, q, i):
+ q[i] = list(q[i]._elements)[0]
+ yield q
+ q[i] = ''
+ yield q
+
+ def _on_Choice(self, q, i):
+ for e1 in q[i]._elements:
+ q[i] = e1
+ yield q
+
+ def _on_Rule(self, q, i):
+ for e1 in q[i]._element._elements:
+ if e1.__class__.__name__ == 'Sequence' and sum(
+ e2.__class__.__name__ == 'This' for e2 in e1._elements):
+ continue
+
+ q[i] = e1
+ yield q
+
+ def _on_This(self, q, i):
+ pass
+
+
+if __name__ == '__main__':
+ from k_map import k_map
+ sys.path.append('../../grammar/')
+ from pygrammar.grammar import SiriGrammar # nopep8
+
+    qb = QueryGenerator(SiriGrammar, {
+ 'regex_map': k_map,
+ 'max_list_n_map': {
+ 'series_match': 1,
+ 'aggregate_functions': 1,
+ 'select_aggregate': 1
+ },
+ 'default_list_n': 2,
+ 'replace_map': {
+ 'k_prefix': '',
+ 'k_suffix': '',
+ 'k_where': '',
+ 'after_expr': '',
+ 'before_expr': '',
+ 'between_expr': '',
+ 'k_merge': '',
+ 'r_singleq_str': '',
+
+ # 'f_median': '',
+ # 'f_median_low': '',
+ # 'f_median_high': '',
+ 'k_now': '',
+ 'r_float': '',
+ 'r_time_str': '',
+ # 'r_grave_str': '',
+ # 'f_filter': '',
+ }
+ })
+
+ maps = {
+ 'replace_map': {
+ 'select_stmt': '',
+ # 'list_stmt': '',
+ # 'count_stmt': '',
+ # 'drop_stmt': '',
+ # 'alter_stmt': '',
+ # 'create_stmt': '',
+ # 'revoke_stmt': '',
+ # 'grant_stmt': '',
+ # 'show_stmt': '',
+ 'calc_stmt': '',
+
+ # 'k_prefix': '',
+ # 'k_suffix': '',
+ # 'k_where': '',
+ # 'after_expr': '',
+ # 'before_expr': '',
+ # 'between_expr': '',
+ # 'k_merge': '',
+ # 'k_limit': '',
+ # 'k_timeit': '',
+ 'r_comment': '',
+ 'r_singleq_str': '',
+ }, 'regex_map': k_map,
+ }
+
+    qb = QueryGenerator(SiriGrammar, maps)
+    for q in qb.generate_queries('START'):
+ print(q)
\ No newline at end of file
from test_log import TestLog
from test_pipe_support import TestPipeSupport
from test_buffer import TestBuffer
+from test_grammar import TestGrammar
if __name__ == '__main__':
parse_args()
+ run_test(TestBuffer())
run_test(TestCompression())
+ run_test(TestGrammar())
run_test(TestGroup())
- run_test(TestList())
run_test(TestInsert())
+ run_test(TestList())
+ run_test(TestLog())
+ run_test(TestPipeSupport())
run_test(TestPool())
run_test(TestSelect())
run_test(TestSelectNano())
run_test(TestSeries())
run_test(TestServer())
run_test(TestUser())
- run_test(TestLog())
- run_test(TestPipeSupport())
- run_test(TestBuffer())
--- /dev/null
+import re
+import random
+import asyncio
+import functools
+import time
+import os
+import sys
+from math import nan
+from testing import default_test_setup
+from testing import run_test
+from testing import Server
+from testing import SiriDB
+from testing import TestBase
+from testing.constants import PYGRAMMAR_PATH
+from querygenerator.querygenerator import QueryGenerator
+from querygenerator.k_map import k_map
+sys.path.append(PYGRAMMAR_PATH)
+from grammar import SiriGrammar # nopep8
+
+
+def gen_simple_data(m, n):
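+    """Return *m* series named '000000', '000001', ... with *n* random
+    integer points each; timestamps are b*n+a, so no two points share a
+    timestamp as long as m <= n."""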
+ series = {
+ str(a).zfill(6): [[b*n+a, random.randint(0, 20)] for b in range(n)]
+ for a in range(m)}
+ # series = {
+ # str(a).zfill(6): [[b, nan if b > 15 else float(random.randint(0, 20))]
+ # for b in range(n)] for a in range(m)}
+ return series
+
+
+def update_k_map_show(show):
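+    """Copy the actual values from a 'show' query result into k_map.
+
+    This makes the generated queries reference real server properties
+    (buffer path, port, uuid, version, ...) instead of the placeholder
+    values that k_map ships with."""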
+ kv = {a['name']: a['value'] for a in show['data']}
+ # for k, v in sorted(kv.items()):
+ # print('k_'+k in k_map['r_doubleq_str'] or
+ # 'k_'+k in k_map['r_integer'] or
+ # 'k_'+k in k_map['r_float'], k, v)
+ k_map['r_integer']['k_active_handles'] = kv['active_handles']
+ k_map['r_doubleq_str']['k_buffer_path'] = '"'+kv['buffer_path']+'"'
+ k_map['r_integer']['k_buffer_size'] = kv['buffer_size']
+ k_map['r_doubleq_str']['k_dbpath'] = '"'+kv['dbpath']+'"'
+ k_map['r_float']['k_drop_threshold'] = kv['drop_threshold']
+ k_map['r_doubleq_str']['k_ip_support'] = '"'+kv['ip_support']+'"'
+ k_map['r_doubleq_str']['k_libuv'] = '"'+kv['libuv']+'"'
+ k_map['r_integer']['k_max_open_files'] = kv['max_open_files']
+ k_map['r_integer']['k_mem_usage'] = kv['mem_usage']
+ k_map['r_integer']['k_open_files'] = kv['open_files']
+ k_map['r_integer']['k_pool'] = kv['pool']
+ k_map['r_integer']['k_received_points'] = kv['received_points']
+ k_map['r_uinteger']['k_list_limit'] = kv['list_limit']
+ k_map['r_integer']['k_startup_time'] = kv['startup_time']
+ k_map['r_doubleq_str']['k_status'] = '"'+kv['status']+'"'
+ k_map['r_doubleq_str']['k_sync_progress'] = '"'+kv['sync_progress']+'"'
+ k_map['r_doubleq_str']['k_timezone'] = '"'+kv['timezone']+'"'
+ k_map['r_integer']['k_uptime'] = kv['uptime']
+ k_map['r_uuid_str']['r_uuid_str'] = kv['uuid']
+ k_map['r_doubleq_str']['k_server'] = '"'+kv['server']+'"'
+ k_map['r_doubleq_str']['uuid'] = '"'+kv['server']+'"'
+ k_map['r_doubleq_str']['k_version'] = '"'+kv['version']+'"'
+ k_map['r_uinteger']['k_port'] = kv['server'].split(':', 1)[1]
+ k_map['r_uinteger']['k_select_points_limit'] = \
+ kv['select_points_limit']
+ k_map['r_doubleq_str']['k_reindex_progress'] = \
+ '"'+kv['reindex_progress']+'"'
+
+
+class TestGrammar(TestBase):
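+    """Generate queries for each statement type directly from SiriGrammar
+    and run them against a single-server database; an unexpected query
+    error makes the test fail."""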
+ title = 'Test from grammar'
+
+ async def test_create_stmt(self):
+ qb = QueryGenerator(SiriGrammar, {
+ 'regex_map': k_map,
+ 'replace_map': {'r_singleq_str': ''}})
+ for q in qb.generate_queries('create_stmt'):
+ await self.client0.query(q)
+
+ async def test_select_stmt(self):
+ qb = QueryGenerator(SiriGrammar, {
+ 'regex_map': k_map,
+ 'max_list_n_map': {
+ 'series_match': 1,
+ 'aggregate_functions': 1},
+ 'replace_map': {
+ 'k_prefix': '',
+ 'k_suffix': '',
+ 'k_where': '',
+ 'after_expr': '',
+ 'before_expr': '',
+ 'between_expr': '',
+ 'k_merge': '',
+ 'r_singleq_str': '',
+ }
+ })
+ for q in qb.generate_queries('select_stmt'):
+            if '/' in q:
+                # skip queries containing a regular expression series match
+                continue
+            elif '~' in q:
+                # QueryError: Cannot use a string filter on number type.
+                continue
+ elif 'between now' in q:
+ continue
+ await self.client0.query(q)
+
+ async def test_revoke_stmt(self):
+ qb = QueryGenerator(SiriGrammar, {
+ 'regex_map': k_map,
+ 'replace_map': {'r_singleq_str': ''}})
+ for q in qb.generate_queries('revoke_stmt'):
+ await self.client0.query(q)
+
+ async def test_grant_stmt(self):
+ qb = QueryGenerator(SiriGrammar, {
+ 'regex_map': k_map,
+ 'replace_map': {'r_singleq_str': ''}})
+ for q in qb.generate_queries('grant_stmt'):
+ await self.client0.query(q)
+
+ async def test_alter_stmt(self):
+ qb = QueryGenerator(SiriGrammar, {
+ 'regex_map': k_map,
+ 'replace_map': {'r_singleq_str': ''}})
+ for q in qb.generate_queries('alter_stmt'):
+            if 'set address' in q:
+                continue  # not possible
+            if 'set port' in q:
+                continue  # not possible
+            if 'set timezone' in q:
+                continue  # error when the value equals the previous one
+            # if 'set name' in q:
+            #     continue  # error when the value equals the previous one
+            if 'group' in q and 'name' in q:
+                continue  # error when the value equals the previous one
+ await self.client0.query(q)
+
+ async def test_count_stmt(self):
+ qb = QueryGenerator(SiriGrammar, {
+ 'regex_map': k_map,
+ 'replace_map': {'r_singleq_str': ''}})
+ for q in qb.generate_queries('count_stmt'):
+ await self.client0.query(q)
+
+ async def test_list_stmt(self):
+ qb = QueryGenerator(SiriGrammar, {
+ 'regex_map': k_map,
+ 'replace_map': {'r_singleq_str': ''}})
+ for q in qb.generate_queries('list_stmt'):
+ await self.client0.query(q)
+
+ async def test_drop_stmt(self):
+ qb = QueryGenerator(SiriGrammar, {
+ 'regex_map': k_map,
+ 'replace_map': {'r_singleq_str': ''}})
+ for q in qb.generate_queries('drop_stmt'):
+            if 'drop server' in q:
+                continue  # not possible
+            if 'drop user' in q:
+                continue  # user does not exist error
+ if 'drop series' in q:
+ continue # and not 'where' in q: continue
+ await self.client0.query(q)
+
+ async def test_show_stmt(self):
+ qb = QueryGenerator(SiriGrammar, {
+ 'regex_map': k_map,
+ 'replace_map': {'r_singleq_str': ''}})
+ for q in qb.generate_queries('show_stmt'):
+ await self.client0.query(q)
+
+ @default_test_setup(1)
+ async def run(self):
+ await self.client0.connect()
+
+ # await self.db.add_pool(self.server1, sleep=2)
+
+ update_k_map_show(await self.client0.query('show'))
+
+ series = gen_simple_data(20, 70)
+
+ await self.client0.insert(series)
+ # await self.client0.query('create group `GROUP` for /.*/')
+
+ await self.test_create_stmt()
+
+ time.sleep(2)
+
+ await self.test_select_stmt()
+
+ await self.test_revoke_stmt()
+
+ await self.test_grant_stmt()
+
+ await self.test_alter_stmt()
+
+ await self.test_count_stmt()
+
+ await self.test_list_stmt()
+
+ await self.test_drop_stmt()
+
+ await self.test_show_stmt()
+
+ self.client0.close()
+
+ return False
+
+
+if __name__ == '__main__':
+ SiriDB.LOG_LEVEL = 'CRITICAL'
+ Server.HOLD_TERM = True
+ Server.MEM_CHECK = True
+ Server.BUILDTYPE = 'Debug'
+ run_test(TestGrammar())