25 from topologylistener
import db
26 from topologylistener
import flow_utils
27 from topologylistener
import message_utils
28 from topologylistener
import messageclasses
# Module logger for the topology-listener test helpers.
log = logging.getLogger(__name__)

# Switch ids created by tests are tagged with dpid_test_marker inside the
# dpid_protected_bits mask, so cleanup code can recognise (and only delete)
# test-created entities.
# NOTE(review): the original file assigned both constants twice (original
# lines 32-33 and 35-36, identical values); the duplicates were removed.
dpid_test_marker = 0xfffe000000000000
dpid_protected_bits = 0xffffff0000000000

# Flow cookies produced by tests carry this flag bit.
cookie_test_data_flag = 0x0010000000000000
def link_props_request(link_props):
    """Serialise a link_props object into its JSON-ready request dict.

    NOTE(review): the ``def`` line and the opening of the returned dict were
    lost in extraction; the 'source'/'dest' key names are inferred from the
    ``link_props.source`` / ``link_props.dest`` attributes - confirm against
    VCS history.
    """
    return {
        'source': {
            'switch-id': link_props.source.dpid,
            'port-id': link_props.source.port},
        'dest': {
            'switch-id': link_props.dest.dpid,
            'port-id': link_props.dest.port},
        'props': link_props.props,
        # Java consumers expect millisecond timestamps.
        'time_create': link_props.time_create.as_java_timestamp(),
        'time_modify': link_props.time_modify.as_java_timestamp()}
def link_props_put_payload(request):
    """Wrap a link_props request dict into a CD_LINK_PROPS_PUT command."""
    return {
        'link_props': request,
        'clazz': messageclasses.CD_LINK_PROPS_PUT}
def link_props_drop_payload(request):
    """Wrap a link_props lookup-mask dict into a CD_LINK_PROPS_DROP command."""
    return {
        'lookup_mask': request,
        'clazz': messageclasses.CD_LINK_PROPS_DROP}
def feature_toggle_request(**fields):
    """Build a FeatureToggleRequest payload from keyword toggles.

    NOTE(review): the parameter list was reconstructed as ``**fields`` (the
    module's helper index renders it as ``fields``); ``dict(fields)`` is
    compatible with both forms - confirm against VCS history.
    """
    payload = dict(fields)
    payload['clazz'] = (
        'org.openkilda.messaging.command.system.FeatureToggleRequest')
    return payload
def isl_info_payload(isl, **fields):
    """Build an MT_ISL discovery payload for *isl*; **fields override defaults.

    NOTE(review): two default entries between 'state' and
    'available_bandwidth' (original lines 85-86, likely latency/speed) were
    lost in extraction - confirm against VCS history.
    """
    payload = {
        'state': 'DISCOVERED',
        'available_bandwidth': 1000}
    payload.update(fields)
    # clazz and path are stamped after the overrides so callers cannot
    # accidentally produce a malformed message.
    payload.update({
        'clazz': messageclasses.MT_ISL,
        'path': [
            {
                'switch_id': isl.source.dpid,
                'port_no': isl.source.port},
            {
                'switch_id': isl.dest.dpid,
                'port_no': isl.dest.port}]})
    return payload
def command(payload, **fields):
    """Wrap *payload* into a message envelope stamped with MT_INFO.

    NOTE(review): the construction of the initial envelope dict (original
    lines 99-105) was lost in extraction; only the ``fields`` override and
    the MT_INFO clazz stamp survive. Reconstructed minimally - confirm the
    original default fields (timestamp, correlation id, ...) against VCS
    history.
    """
    message = {'payload': payload}
    message.update(fields)
    message.update({
        'clazz': message_utils.MT_INFO})
    return message
def make_correlation_id(prefix=''):
    """Return a unique correlation id, optionally namespaced under *prefix*.

    A non-empty prefix is separated from the uuid by a single '.'; a prefix
    already ending in '.' is used as-is.
    """
    if prefix and prefix[-1] != '.':
        prefix += '.'
    return '{}{}'.format(prefix, uuid.uuid1())
def make_datapath_id(number):
    """Map a small test number onto a dpid string tagged as test data.

    Raises ValueError when *number* intrudes into the protected high bits
    reserved for the test marker.

    NOTE(review): the return statement (original line 123) was lost in
    extraction; the reconstruction mirrors the marker check performed by
    is_test_dpid() - confirm against VCS history.
    """
    if number & dpid_protected_bits:
        raise ValueError(
            'Invalid switch id {}: use protected bits'.format(number))
    return long_as_dpid(number | dpid_test_marker)
def clean_neo4j_test_data(tx):
    """Delete test-created records (test dpids / test cookies) from neo4j.

    NOTE(review): large parts of this code were lost in extraction (the
    per-entity filters and DELETE statements); everything marked
    TODO(review) below is a best-effort reconstruction and must be confirmed
    against VCS history. The original may also have split these passes into
    separate helpers (the module's helper index mentions
    drop_db_link_props(tx)).
    """
    # Pass 1: isl / link_props relations between switches.
    q = 'MATCH (:switch)-[self:isl|:link_props]->() RETURN self'
    for data_set in tx.run(q):
        rel = data_set['self']
        # TODO(review): original lines 137-145 lost - presumably: skip
        # relations whose endpoints are not test dpids, DELETE the rest.

    # Pass 2: switch nodes.
    q = 'MATCH (a:switch) RETURN a'
    batch = (x['a'] for x in tx.run(q))
    # TODO(review): original lines 148-154 lost - presumably: DELETE the
    # nodes in `batch` whose dpid is a test dpid.

    # Pass 3: link_props nodes; these carry src_switch/dst_switch
    # properties that are matched against the test dpid marker.
    q = 'MATCH (a:link_props) RETURN a'
    batch = (x['a'] for x in tx.run(q))
    for node in batch:
        match = [
            is_test_dpid(node[rel])
            for rel in ('src_switch', 'dst_switch')]
        # TODO(review): original delete condition/statement lost here
        # (probably: delete the node when all(match)).

    # Pass 4: flow relations flagged with the test cookie bit.
    q_lookup = 'MATCH (:switch)-[a:flow]->(:switch) RETURN a'
    q_delete = 'MATCH (:switch)-[a:flow]->(:switch) WHERE id(a)=$id DELETE a'
    batch = (x['a'] for x in tx.run(q_lookup))
    for relation in batch:
        cookie = relation['cookie']
        # TODO(review): original lines 171-174 lost (probably a guard for
        # relations that carry no cookie).
        if not (cookie & cookie_test_data_flag):
            continue
        tx.run(q_delete, {'id': db.neo_id(relation)})
def drop_db_flow_segments(tx, flow_id):
    """Delete all flow_segment relations belonging to *flow_id*.

    NOTE(review): the final clause of the query (original line 188) was lost
    in extraction; 'DELETE fs' is the inferred completion - confirm against
    VCS history.
    """
    q = (
        'MATCH (:switch)-[fs:flow_segment]->(:switch)\n'
        'WHERE fs.flowid=$flow_id\n'
        'DELETE fs')
    tx.run(q, {'flow_id': flow_id})
def is_test_dpid(dpid):
    """Return True when *dpid* (an integer) carries the test dpid marker.

    NOTE(review): the ``def`` line was lost in extraction; the name comes
    from the call sites in the cleanup code. If callers pass ':'-separated
    dpid strings they must go through dpid_as_long() first - confirm.
    """
    return dpid & dpid_protected_bits == dpid_test_marker
def dpid_as_long(dpid_str):
    """Convert a ':'-separated hex dpid string into its integer value."""
    value = dpid_str.replace(':', '')
    return int(value, 16)
def long_as_dpid(number):
    """Convert an integer dpid into its 'xx:xx:...' 8-byte string form.

    NOTE(review): the hex-formatting line(s) (original 203-204) were lost in
    extraction; '{:016x}' is inferred from the two-character chunking below
    and the 8-byte dpid format - confirm against VCS history.
    """
    value = '{:016x}'.format(number)
    i = iter(value)
    # zip(i, i) over a single iterator pairs consecutive hex digits (bytes).
    chunked = [a + b for a, b in zip(i, i)]
    return ':'.join(chunked)
227 logging.basicConfig(level=logging.DEBUG, stream=sys.stdout)
230 return flow_utils.graph
233 for module, attr, replace
in (
235 current = getattr(module, attr)
236 if current
is replace:
240 module_data[attr] = current
241 setattr(module, attr, replace)
# Attribute of the (out-of-view) test-case class: resolves file names
# relative to this module's directory; used as self.path('data', name) by
# load_data().
path = functools.partial(os.path.join, os.path.dirname(__file__))
def setUp(self):
    """Log a visual separator plus the test id before every test.

    NOTE(review): reconstructed from a mangled extraction - the method name
    (original lines ~252-253) and the source of ``prefix`` were lost;
    confirm against VCS history.
    """
    prefix = '*- '  # TODO(review): confirm the original prefix value
    separator = '*-' * 29 + '*'
    message = '\n'.join((
        '',
        separator,
        '{} Run test {}'.format(prefix, self.id()),
        separator))
    logging.info(message)
def drop_persistent_data(self):
    """Wipe test data written to neo4j by previous runs.

    NOTE(review): body reconstructed - original lines 265-269 were lost in
    extraction; confirm against VCS history.
    """
    with env.neo4j_connect.begin() as tx:
        clean_neo4j_test_data(tx)

def feed_service(self, message, can_fail=False):
    """Push *message* through the topology-listener entry point.

    NOTE(review): reconstructed - only the final assertion survived
    extraction (original line 270); the call producing ``result`` and the
    method boundary are inferred from the module's helper index - confirm.
    """
    result = feed_message(message)
    if not can_fail:
        self.assertTrue(result)
def open_neo4j_session(self):
    """Start a new neo4j transaction on the shared test environment."""
    return env.neo4j_connect.begin()
def load_data(self, name):
    """Load and return the JSON fixture *name* from the test data directory."""
    with open(self.path('data', name), 'rt') as stream:
        return json.load(stream)
285 def send(self, topic, payload=None):
def get(self, timeout=None):
    """Mock of a kafka future's .get(): log the queued record.

    NOTE(review): any original behaviour after the log call (original lines
    298+, e.g. a return value) was lost in extraction - confirm against VCS
    history.
    """
    log.debug('Send kafka record: %s', self.record)
301 payload_visibility_limit = 60
302 _counter = itertools.count()
def __str__(self):
    """Render the kafka send record with a truncated payload preview.

    NOTE(review): heavily reconstructed from a mangled extraction - the
    method header (it may be __repr__), the seeding of ``chunks`` (original
    lines ~304-310, probably sequence number and topic) and the slicing
    arguments of the long-payload branch (lines 321-322) were lost; confirm
    against VCS history. Python 2 only (``basestring``).
    """
    payload = self.payload
    if not isinstance(payload, basestring):
        payload = str(payload)

    chunks = []  # TODO(review): original likely seeded seq-number/topic here
    if len(payload) < self.payload_visibility_limit:
        chunks.append(
            'payload={!r}'.format(payload))
    else:
        chunks.append(
            'payload="""{!r}""" ... more {} chars'.format(
                payload[:self.payload_visibility_limit],
                len(payload) - self.payload_visibility_limit))
    return 'KafkaSend{{{}}}'.format(', '.join(chunks))
def link_props_request(link_props)
def send(self, topic, payload=None)
def drop_persistent_data(self)
int payload_visibility_limit
def feed_service(self, message, can_fail=False)
def make_correlation_id(prefix='')
def open_neo4j_session(self)
def feed_isl_discovery(isl, fields)
def dpid_as_long(dpid_str)
def load_data(self, name)
def feature_toggle_request(fields)
def drop_db_link_props(tx)
def command(payload, fields)
def __init__(self, record)
def kafka_producer_backlog(self)
def isl_info_payload(isl, fields)
def clean_neo4j_test_data(tx)
def __init__(self, backlog_size=32)
def link_props_drop_payload(request)
def link_props_put_payload(request)
def drop_db_flow_segments(tx, flow_id)
def feed_message(message)
def get(self, timeout=None)
def __init__(self, topic, payload)
def make_datapath_id(number)
def reset_kafka_producer(self)