> Is there still a need to perform parallelisation for the mentioned software
> components by other approaches?

The multi-processing support by the Coccinelle software triggers some
development challenges.
If data should be shared between started (background) processes,
an external system needs to be selected to provide the desired storage service.
This prompted me to try sending these data through message queue interfaces
instead; the attached script for the semantic patch language demonstrates
this alternative data processing approach.


elfring@Sonne:~/Projekte/Linux/next-patched> spatch 
~/Projekte/Coccinelle/janitor/list_duplicate_statement_pairs_from_if_branches10.cocci
 drivers/media/dvb-frontends/stv0297.c
…
statement1|statement2|"function name"|"source file"|incidence
dprintk ( "%s: readreg error (reg == 0x%02x, ret == %i)\n" , __func__ , reg , 
ret ) ;|return - 1 ;|stv0297_readreg|drivers/media/dvb-frontends/stv0297.c|3
dprintk ( "%s: readreg error (reg == 0x%02x, ret == %i)\n" , __func__ , reg1 , 
ret ) ;|return - 1 ;|stv0297_readregs|drivers/media/dvb-frontends/stv0297.c|3


Such a simple test case works because it stays within known default system 
limits.
If larger numbers of questionable source code combinations are analysed,
it may become necessary to increase the configuration parameter “msg_max” considerably.
What do you think about trying further fine-tuning in the affected areas?

Regards,
Markus
@initialize:python@
@@
import io, posix_ipc, json, sys
sys.stderr.write("Creation of a message queue\n"
                 "QUEUE_MESSAGES_MAX_DEFAULT: %d\n"
                 % (posix_ipc.QUEUE_MESSAGES_MAX_DEFAULT))
# See also:
# * man mq_overview
# * 
https://stackoverflow.com/questions/32757046/is-it-possible-to-open-message-queue-in-linux-with-huge-number-of-elements
mq = posix_ipc.MessageQueue(None, posix_ipc.O_CREX)
sys.stderr.write("A message queue was created.\n")

def store_statements(fun, source, s1, s2):
    """Send data for the service.

    Serialise one record per match position and push the whole JSON
    array through the message queue created by the initialiser.

    fun:    name of the function the statements were found in
    source: sequence of Coccinelle position objects (file/line/column)
    s1, s2: the two adjacent statements, as strings
    """
    records = [{"name": fun,
                "file": place.file,
                "line": place.line,
                # Coccinelle columns are zero-based; report one-based ones.
                "column": int(place.column) + 1,
                "s1": s1,
                "s2": s2}
               for place in source]
    # json.dumps replaces the previous hand-assembled JSON text.
    # encode() is required: bytes(str) without an encoding raises
    # TypeError under Python 3, so mq.send(bytes(result), 0) crashed.
    mq.send(json.dumps(records).encode("utf-8"), 0)

// Match a pair of consecutive statements s1/s2 that appear inside an
// if branch anywhere in the body of a function "work" (any return type
// T); the position variable "pos" records where s1 was found.
@searching exists@
identifier work;
statement s1, s2;
position pos;
type T;
@@
 T work(...)
 {
 ... when any
 if (...)
 {
 ... when any
 s1@pos
 s2
 }
 ... when any
 }

// Bind the metavariables of the "searching" rule and hand them to Python.
@script:python collection@
fun << searching.work;
s1 << searching.s1;
s2 << searching.s2;
place << searching.pos;
@@
# Forward every match to the message queue set up by the initialiser.
store_statements(fun, place, s1, s2)

@finalize:python@
@@
if mq.current_messages > 0:
   mapping = {}

   def insert(x):
       """Add data to an internal table."""
       key = x["name"], x["file"], x["line"], x["column"]
       if key in mapping:
          sys.stderr.write("""A duplicate key was passed.
function: %s
file: %s
line: %s
column: %d
""" % key)
          raise RuntimeError
       else:
          mapping[key] = x["s1"], x["s2"]

   def data_import():
      while True:
         try:
            for v in json.loads(mq.receive(0)[0]):
               insert(v)
         except posix_ipc.BusyError:
            break

   data_import()
   from collections import Counter
   counts = Counter()

   for k, v in mapping.items():
      counts[(v[0], v[1], k[0], k[1])] += 1

   delimiter = "|"
   duplicates = {}

   for k, v in counts.items():
      if v > 1:
         duplicates[k] = v

   if len(duplicates.keys()) > 0:
      sys.stdout.write(delimiter.join(["statement1",
                                       "statement2",
                                       '"function name"',
                                       '"source file"',
                                       "incidence"]))
      sys.stdout.write("\r\n")

      for k, v in duplicates.items():
         sys.stdout.write(delimiter.join([k[0], k[1], k[2], k[3], str(v)]))
         sys.stdout.write("\r\n")
   else:
      sys.stderr.write("Duplicate statements were not determined from "
                       + str(len(records)) + " records.\n")
      sys.stderr.write(delimiter.join(["statement1",
                                       "statement2",
                                       '"function name"',
                                       '"source file"',
                                       "line"]))
      sys.stderr.write("\r\n")

      for k, v in counts.items():
         sys.stdout.write(delimiter.join([v[0], v[1], k[1], k[0], k[2]]))
         sys.stderr.write("\r\n")
else:
   sys.stderr.write("No result for this analysis!\n")
_______________________________________________
Cocci mailing list
Cocci@systeme.lip6.fr
https://systeme.lip6.fr/mailman/listinfo/cocci

Reply via email to