1
0
mirror of https://github.com/samba-team/samba.git synced 2025-01-03 01:18:10 +03:00

traffic: generate sparser descriptions of conversations

Rather than building all the packets at this point, we stick to the
barest details of the packets (which is all the model gives us
anyway).

The advantage is that this will take a lot less memory, which matters
because this process forks into many clients that would otherwise share
and mutate the conversation list.

Signed-off-by: Douglas Bagnall <douglas.bagnall@catalyst.net.nz>
Reviewed-by: Andrew Bartlett <abartlet@samba.org>
This commit is contained in:
Douglas Bagnall 2018-12-14 14:26:34 +13:00 committed by Douglas Bagnall
parent 7edf58dc58
commit 7b03e81c61
3 changed files with 10 additions and 15 deletions

View File

@ -1293,9 +1293,8 @@ class TrafficModel(object):
return c return c
def generate_conversations(self, scale, duration, replay_speed=1, def generate_conversation_sequences(self, scale, duration, replay_speed=1):
server=1, client=2): """Generate a list of conversation descriptions from the model."""
"""Generate a list of conversations from the model."""
# We run the simulation for ten times as long as our desired # We run the simulation for ten times as long as our desired
# duration, and take the section at the end. # duration, and take the section at the end.
@ -1319,7 +1318,7 @@ class TrafficModel(object):
% (n_packets, target_packets, len(conversations), scale)), % (n_packets, target_packets, len(conversations), scale)),
file=sys.stderr) file=sys.stderr)
conversations.sort() # sorts by first element == start time conversations.sort() # sorts by first element == start time
return seq_to_conversations(conversations) return conversations
def seq_to_conversations(seq, server=1, client=2): def seq_to_conversations(seq, server=1, client=2):

View File

@ -70,11 +70,6 @@ class TrafficLearnerTests(BlackboxTestCase):
for i, opts in enumerate((["--random-seed=3"], for i, opts in enumerate((["--random-seed=3"],
["--random-seed=4"], ["--random-seed=4"],
["--random-seed=3",
"--conversation-persistence=0.5"],
["--random-seed=3",
"--old-scale",
"--conversation-persistence=0.95"],
)): )):
with temp_file(self.tempdir) as output: with temp_file(self.tempdir) as output:
command = ([SCRIPT, MODEL, command = ([SCRIPT, MODEL,

View File

@ -250,9 +250,9 @@ def main():
logger.info(("Using the specified model file to " logger.info(("Using the specified model file to "
"generate conversations")) "generate conversations"))
conversations = model.generate_conversations(opts.scale_traffic, conversations = model.generate_conversation_sequences(opts.scale_traffic,
opts.duration, opts.duration,
opts.replay_rate) opts.replay_rate)
except ValueError: except ValueError:
logger.error(("Could not parse %s, which does not seem to be " logger.error(("Could not parse %s, which does not seem to be "
"a model generated by script/traffic_learner." "a model generated by script/traffic_learner."
@ -263,7 +263,7 @@ def main():
conversations = [] conversations = []
if debuglevel > 5: if debuglevel > 5:
for c in conversations: for c in traffic.seq_to_conversations(conversations):
for p in c.packets: for p in c.packets:
print(" ", p, file=sys.stderr) print(" ", p, file=sys.stderr)
@ -350,7 +350,7 @@ def main():
logger.info("Writing traffic summary") logger.info("Writing traffic summary")
summaries = [] summaries = []
for c in conversations: for c in traffic.seq_to_conversations(conversations):
summaries += c.replay_as_summary_lines() summaries += c.replay_as_summary_lines()
summaries.sort() summaries.sort()
@ -359,7 +359,8 @@ def main():
exit(0) exit(0)
traffic.replay(conversations, host, traffic.replay(traffic.seq_to_conversations(conversations),
host,
lp=lp, lp=lp,
creds=creds, creds=creds,
accounts=accounts, accounts=accounts,