stevedlawrence commented on a change in pull request #88: Daffodil 1919 separators
URL: https://github.com/apache/incubator-daffodil/pull/88#discussion_r208334986
 
 

 ##########
 File path: daffodil-runtime1/src/main/scala/org/apache/daffodil/processors/parsers/SequenceParserBases.scala
 ##########
 @@ -0,0 +1,380 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.daffodil.processors.parsers
+
+import org.apache.daffodil.processors.Evaluatable
+import java.io.PrintWriter
+import org.apache.daffodil.exceptions.UnsuppressableException
+import java.io.StringWriter
+import org.apache.daffodil.dsom.SchemaDefinitionDiagnosticBase
+import org.apache.daffodil.exceptions.Assert
+import org.apache.daffodil.processors.Success
+import org.apache.daffodil.processors.SequenceRuntimeData
+import org.apache.daffodil.processors.ElementRuntimeData
+import org.apache.daffodil.processors.TermRuntimeData
+import org.apache.daffodil.processors.Failure
+
+abstract class OrderedSequenceParserBase(
+  srd: SequenceRuntimeData,
+  protected val childParsers: Seq[SequenceChildParser])
+  extends CombinatorParser(srd) {
+  override def nom = "Sequence"
+
+  override lazy val runtimeDependencies: Seq[Evaluatable[AnyRef]] = Nil
+
+  override lazy val childProcessors: Seq[Parser] = childParsers
+
+  /**
+   * Parses (1) one iteration of an array with a fixed/expression occurs count,
+   * (2) a model group, or (3) a scalar element.
+   *
+   * Returns a status indicating success/failure and the nature of that success/failure.
+   *
+   * No backtracking supported.
+   */
+  protected def parseOneWithoutPoU(
+    parserArg: SequenceChildParser,
+    trd: TermRuntimeData,
+    pstate: PState): ParseAttemptStatus
+
+  /**
+   * Parses one iteration of an array/optional element, and returns
+   * a status indicating success/failure and the nature of that success/failure.
+   *
+   * Supports speculative parsing via backtracking.
+   */
+  protected def parseOneWithPoU(
+    parser: RepeatingChildParser,
+    erd: ElementRuntimeData,
+    pstate: PState,
+    priorState: PState.Mark,
+    ais: GoArrayIndexStatus,
+    isBounded: Boolean): ParseAttemptStatus
+
+  protected def zeroLengthSpecialChecks(pstate: PState, wasLastChildZeroLength: Boolean): Unit
+
+  final protected def checkN(pstate: PState): Boolean = {
+    if (pstate.arrayPos > pstate.tunable.maxOccursBounds) {
+      PE(pstate, "Occurs count %s exceeds implementation maximum of %s.", pstate.arrayPos, pstate.tunable.maxOccursBounds)
+      false
+    } else true
+  }
+
+  /**
+   * This parse method is used for both separated and unseparated sequences.
+   */
+  override protected def parse(pstate: PState): Unit = {
+    val children = childParsers
+
+    var scpIndex = 0
+    pstate.mpstate.groupIndexStack.push(1L) // one-based indexing
+
+    val limit = children.length
+
+    var wasLastChildZeroLength = false
+
+    //
+    // This loop iterates over the children terms of the sequence
+    //
+    while ((scpIndex < limit) && (pstate.processorStatus eq Success)) {
+      val child = children(scpIndex)
+      child match {
+        case parser: RepeatingChildParser => {
+          //
+          // The sequence child is an array/repeating element (or optional
+          // element, as the runtime doesn't distinguish them).
+          //
+          val min = parser.minRepeats(pstate)
+          val max = parser.maxRepeats(pstate)
+          val isBounded = parser.isBoundedMax(max)
+          val erd = parser.trd.asInstanceOf[ElementRuntimeData]
+
+          parser.startArray(pstate)
+
+          //
+          // There are two kinds of loops: arrays which have points of uncertainty (PoU),
+          // where speculative parsing is used to determine how many occurrences there are,
+          // and arrays with a specified number of occurrences, where the number is known
+          // or is computed.
+          //
+          parser.hasPoU match {
+            case true => {
+              //
+              // This case is for arrays/optionals where the number of occurrences is
+              // determined by speculative parsing: OCK=implicit with min/maxOccurs
+              // different, or OCK=parsed.
+              //
+
+              //
+              // The beforeArrayState will be assigned the priorState before the whole
+              // array. This is the same object as the priorState before the first
+              // occurrence.
+              //
+              var beforeArrayState: PState.Mark = null
 
 Review comment:
  This is related to DAFFODIL-1960. Here we are keeping a mark at the very beginning of
the array, and it is never discarded until the end of the array. That means we could
potentially backtrack to where the array started, so we cannot discard any buffered data
while streaming. This is common in formats like PCAP, where the entire file is just a
repetition of packets. Is there any way around this, so that beforeArrayState can be
discarded at some point? Maybe via discriminators or something?
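
To make the buffering concern concrete, here is a minimal, self-contained sketch
(hypothetical `State`/`Mark` types, not Daffodil's actual PState/Mark API) contrasting
the two mark lifetimes: a single mark held for the whole array pins every occurrence's
data in the buffer, while a mark per occurrence that is discarded as soon as that
occurrence is resolved (for example, by a discriminator) only pins the data for the
occurrence currently being speculated on.

```scala
import scala.collection.mutable.ArrayBuffer

object MarkLifetimeSketch {

  // Hypothetical mark: just remembers a stream position.
  final case class Mark(position: Int)

  // Hypothetical parse state over an in-memory "stream" (a stand-in for PState).
  final class State(val data: Array[Byte]) {
    var position: Int = 0
    private val marks = ArrayBuffer.empty[Mark]

    def mark(): Mark = { val m = Mark(position); marks += m; m }
    def discard(m: Mark): Unit = marks -= m
    def reset(m: Mark): Unit = { position = m.position; marks -= m }

    // Earliest position that must stay buffered for potential backtracking.
    // A streaming input layer could release everything before this point.
    def earliestRetained: Int =
      if (marks.isEmpty) position else marks.map(_.position).min
  }

  // One mark per occurrence: the PoU for an occurrence is resolved (its mark
  // discarded) as soon as that occurrence parses successfully, so the buffer
  // never needs to retain more than the occurrence currently being speculated on.
  def parseArray(state: State)(parseOne: State => Boolean): Int = {
    var count = 0
    var more = true
    while (more) {
      val m = state.mark()       // PoU for this one occurrence only
      if (parseOne(state)) {
        state.discard(m)         // resolved: data before `position` may be released
        count += 1
      } else {
        state.reset(m)           // failed speculation: back up to this occurrence's start
        more = false
      }
    }
    count
  }

  def main(args: Array[String]): Unit = {
    // Example: a 12-byte "file" of fixed 4-byte records, parsed until data runs out.
    val st = new State(new Array[Byte](12))
    val n = parseArray(st) { s =>
      if (s.position + 4 <= s.data.length) { s.position += 4; true } else false
    }
    println(s"occurrences=$n retainedFrom=${st.earliestRetained}") // occurrences=3 retainedFrom=12
  }
}
```

Whether the real PState/Mark machinery could be restructured along these lines, and what
role discriminators would play in resolving each occurrence's PoU early, is exactly the
question above; the sketch only illustrates the buffering difference.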

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services
