This is an automated email from the ASF dual-hosted git repository. cdutz pushed a commit to branch rel/0.13 in repository https://gitbox.apache.org/repos/asf/plc4x.git
commit dd37aad7e85296fc7d281f08f7a9183b37e9d50f Author: Christofer Dutz <[email protected]> AuthorDate: Fri Aug 22 20:29:24 2025 +0200 chore: Pulled changes from develop. --- .github/workflows/c-platform.yml | 4 +- .github/workflows/changelog.yml | 4 +- .github/workflows/dependency-review.yml | 2 +- .github/workflows/go-platform.yml | 4 +- .github/workflows/java-platform.yml | 4 +- .github/workflows/python-platform.yml | 4 +- .github/workflows/sast.yaml | 2 +- .../templates/go/complex-type-template.go.ftlh | 20 ++--- plc4j/drivers/ctrlx/pom.xml | 2 +- plc4j/drivers/opcua/pom.xml | 2 +- .../s7/readwrite/protocol/S7ProtocolLogic.java | 31 ++++++++ .../readwrite/optimizer/S7BlockReadOptimizer.java | 92 ++++++++++++++-------- .../readwrite/protocol/S7ProtocolLogic.java | 32 ++++++-- .../java/s7/readwrite/ManualS7CounterTest.java | 4 +- .../plc4x/java/spi/values/PlcRawByteArray.java | 18 +++++ .../utils/cache/CachedPlcConnectionManager.java | 4 + .../java/utils/cache/ConnectionContainer.java | 6 ++ pom.xml | 14 ++-- website/antora-playbook.yml | 5 +- .../modules/developers/pages/preparing/linux.adoc | 29 +++---- .../modules/developers/pages/preparing/macos.adoc | 10 +-- .../developers/pages/preparing/windows.adoc | 14 ++-- .../modules/developers/pages/release/release.adoc | 15 ++-- website/resources/plc4x-doap.rdf | 9 ++- 24 files changed, 222 insertions(+), 109 deletions(-) diff --git a/.github/workflows/c-platform.yml b/.github/workflows/c-platform.yml index a881a02bef..0f76e9807a 100644 --- a/.github/workflows/c-platform.yml +++ b/.github/workflows/c-platform.yml @@ -59,10 +59,10 @@ jobs: run: brew install libpcap - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Setup Java - uses: actions/setup-java@v4 + uses: actions/setup-java@v5 with: distribution: 'adopt' java-package: jdk diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml index 64ea2bdc38..ebc8bca6be 100644 --- 
a/.github/workflows/changelog.yml +++ b/.github/workflows/changelog.yml @@ -20,12 +20,13 @@ name: 'Generate changelog' on: release: types: [created, edited] + workflow_dispatch: jobs: generate-changelog: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 - uses: BobAnkh/[email protected] @@ -33,3 +34,4 @@ jobs: ACCESS_TOKEN: ${{secrets.GITHUB_TOKEN}} TYPE: 'feat:Feature,fix:Bug Fixes,docs:Documentation,refactor:Refactor,perf:Performance Improvements' PATH: 'CHANGELOG.md' + BRANCH: 'chore/changelog_update' diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index 31516b289c..d24dccf4bf 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -37,6 +37,6 @@ jobs: runs-on: ubuntu-latest steps: - name: 'Checkout Repository' - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: 'Dependency Review' uses: actions/dependency-review-action@v4 diff --git a/.github/workflows/go-platform.yml b/.github/workflows/go-platform.yml index f9919f5040..954a8a89ff 100644 --- a/.github/workflows/go-platform.yml +++ b/.github/workflows/go-platform.yml @@ -103,10 +103,10 @@ jobs: run: choco install -y nmap - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Setup Java - uses: actions/setup-java@v4 + uses: actions/setup-java@v5 with: distribution: 'adopt' java-package: jdk diff --git a/.github/workflows/java-platform.yml b/.github/workflows/java-platform.yml index b6e33e9d0e..3efb21d565 100644 --- a/.github/workflows/java-platform.yml +++ b/.github/workflows/java-platform.yml @@ -61,10 +61,10 @@ jobs: run: brew install libpcap - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Setup Java - uses: actions/setup-java@v4 + uses: actions/setup-java@v5 with: distribution: 'adopt' java-package: jdk diff --git a/.github/workflows/python-platform.yml 
b/.github/workflows/python-platform.yml index d79144825c..8d191b2a67 100644 --- a/.github/workflows/python-platform.yml +++ b/.github/workflows/python-platform.yml @@ -61,14 +61,14 @@ jobs: if: matrix.os == 'macos-latest' run: brew install libpcap - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python uses: actions/setup-python@v5 with: python-version: '${{ matrix.python-version }}' - name: Setup Java - uses: actions/setup-java@v4 + uses: actions/setup-java@v5 with: distribution: 'adopt' java-package: jdk diff --git a/.github/workflows/sast.yaml b/.github/workflows/sast.yaml index 1eddc796fd..da20c16c3b 100644 --- a/.github/workflows/sast.yaml +++ b/.github/workflows/sast.yaml @@ -35,7 +35,7 @@ jobs: if: false steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Run Trivy vulnerability scanner in repo mode uses: aquasecurity/trivy-action@master diff --git a/code-generation/language/go/src/main/resources/templates/go/complex-type-template.go.ftlh b/code-generation/language/go/src/main/resources/templates/go/complex-type-template.go.ftlh index 7040a9c148..74cb97dc59 100644 --- a/code-generation/language/go/src/main/resources/templates/go/complex-type-template.go.ftlh +++ b/code-generation/language/go/src/main/resources/templates/go/complex-type-template.go.ftlh @@ -403,7 +403,7 @@ type _${type.name}Builder struct { childBuilder _${type.name}ChildBuilder </#if> - err *utils.MultiError + collectedErr []error } var _ (${type.name}Builder) = (*_${type.name}Builder)(nil) @@ -448,10 +448,7 @@ func (b *_${type.name}Builder) With<#if field.isOptionalField()>Optional</#if>${ var err error b.${field.name?cap_first}, err = builder.Build() if err != nil { - if b.err == nil { - b.err = &utils.MultiError{MainError: errors.New("sub builder failed")}<@emitImport import="github.com/pkg/errors" /> - } - b.err.Append(errors.Wrap(err, "${helper.getLanguageTypeNameForField(field)}Builder failed")) + b.collectedErr = 
append(b.collectedErr, errors.Wrap(err, "${helper.getLanguageTypeNameForField(field)}Builder failed")) } return b } @@ -472,15 +469,12 @@ func (b *_${type.name}Builder) <#if type.isAbstract()>Partial</#if>Build() (${ty <#list type.propertyFields?filter(field->!field.isOptionalField()) as field> <#if field.type.isComplexTypeReference() || helper.needsPointerAccess(field)> if b.${field.name?cap_first} == nil { - if b.err == nil { - b.err = new(utils.MultiError) - } - b.err.Append(errors.New("mandatory field '${field.name}' not set")) + b.collectedErr = append(b.collectedErr, errors.New("mandatory field '${field.name}' not set")) } </#if> </#list> - if b.err != nil{ - return nil, errors.Wrap(b.err, "error occurred during build")<@emitImport import="github.com/pkg/errors" /> + if err := stdErrors.Join(b.collectedErr...); err != nil {<@emitImportWithAlias alias="stdErrors" import="errors" /> + return nil, errors.Wrap(err, "error occurred during build")<@emitImport import="github.com/pkg/errors" /> } return b._${type.name}.deepCopy(), nil } @@ -551,8 +545,8 @@ func (b *_${type.name}Builder) DeepCopy() any { _copy.childBuilder = b.childBuilder.DeepCopy().(_${type.name}ChildBuilder) _copy.childBuilder.setParent(_copy) </#if> - if b.err != nil { - _copy.err = b.err.DeepCopy().(*utils.MultiError) + if b.collectedErr != nil { + copy(_copy.collectedErr, b.collectedErr) } return _copy } diff --git a/plc4j/drivers/ctrlx/pom.xml b/plc4j/drivers/ctrlx/pom.xml index 200e3a5445..0435d098d2 100644 --- a/plc4j/drivers/ctrlx/pom.xml +++ b/plc4j/drivers/ctrlx/pom.xml @@ -187,7 +187,7 @@ <dependency> <groupId>org.jetbrains.kotlin</groupId> <artifactId>kotlin-stdlib-jdk8</artifactId> - <version>2.2.0</version> + <version>2.2.10</version> </dependency> </dependencies> </dependencyManagement> diff --git a/plc4j/drivers/opcua/pom.xml b/plc4j/drivers/opcua/pom.xml index e135d6f90b..9ce7bea4ad 100644 --- a/plc4j/drivers/opcua/pom.xml +++ b/plc4j/drivers/opcua/pom.xml @@ -211,7 +211,7 @@ 
<dependency> <groupId>com.github.docker-java</groupId> <artifactId>docker-java-api</artifactId> - <version>3.5.3</version> + <version>3.6.0</version> <scope>test</scope> </dependency> <dependency> diff --git a/plc4j/drivers/s7/src/main/java/org/apache/plc4x/java/s7/readwrite/protocol/S7ProtocolLogic.java b/plc4j/drivers/s7/src/main/java/org/apache/plc4x/java/s7/readwrite/protocol/S7ProtocolLogic.java index 883f592aab..bc759e2c4b 100644 --- a/plc4j/drivers/s7/src/main/java/org/apache/plc4x/java/s7/readwrite/protocol/S7ProtocolLogic.java +++ b/plc4j/drivers/s7/src/main/java/org/apache/plc4x/java/s7/readwrite/protocol/S7ProtocolLogic.java @@ -2060,6 +2060,25 @@ public class S7ProtocolLogic extends Plc4xProtocolBase<TPKTPacket> { if((tag.getDataType() == TransportSize.BYTE) && (tag.getNumberOfElements() > 1)) { byteBuffer = ByteBuffer.allocate(tag.getNumberOfElements()); byteBuffer.put(plcValue.getRaw()); + } else if((tag.getDataType() == TransportSize.BOOL) && (tag.getNumberOfElements() > 1)) { + if(!(plcValue instanceof PlcList)) { + throw new PlcRuntimeException(String.format("Expected a PlcList with %d PlcBOOL elements", tag.getNumberOfElements())); + } + PlcList plcList = (PlcList) plcValue; + int numBytes = (tag.getNumberOfElements() + 7) / 8; + byteBuffer = ByteBuffer.allocate(numBytes); + for (int i = 0; i < tag.getNumberOfElements(); i++) { + if(!(plcList.getIndex(i) instanceof PlcBOOL)) { + throw new PlcRuntimeException(String.format("Expected a PlcList with %d PlcBOOL elements", tag.getNumberOfElements())); + } + PlcBOOL plcBOOL = (PlcBOOL) plcList.getIndex(i); + if(plcBOOL.getBoolean()) { + int curByte = i / 8; + int curBit = i % 8; + byteBuffer.put(curByte, (byte) (1 << curBit | byteBuffer.get(curByte))); + } + } + transportSize = DataTransportSize.BYTE_WORD_DWORD; } else { for (int i = 0; i < tag.getNumberOfElements(); i++) { int lengthInBits = DataItem.getLengthInBits(plcValue.getIndex(i), tag.getDataType().getDataProtocolId(), 
s7DriverContext.getControllerType(), stringLength); @@ -2104,6 +2123,15 @@ public class S7ProtocolLogic extends Plc4xProtocolBase<TPKTPacket> { // probably expecting to process the read raw data. if(tag.getDataType() == TransportSize.BYTE) { return new PlcRawByteArray(data); + } else if(tag.getDataType() == TransportSize.BOOL) { + final PlcValue[] resultItems = IntStream.range(0, tag.getNumberOfElements()).mapToObj(i -> { + int bitOffset = i; + int byteOffset = bitOffset / 8; + bitOffset = bitOffset % 8; + boolean bitValue = ((data[byteOffset] >> bitOffset) & 0x01) != 0; + return PlcBOOL.of(bitValue); + }).toArray(PlcValue[]::new); + return DefaultPlcValueHandler.of(tag, resultItems); } else { // Fetch all final PlcValue[] resultItems = IntStream.range(0, tag.getNumberOfElements()).mapToObj(i -> { @@ -2202,6 +2230,9 @@ public class S7ProtocolLogic extends Plc4xProtocolBase<TPKTPacket> { transportSize = TransportSize.CHAR; int stringLength = (s7Tag instanceof S7StringFixedLengthTag) ? ((S7StringFixedLengthTag) s7Tag).getStringLength() : 254; numElements = numElements * (stringLength + 2) * 2; + } else if ((transportSize == TransportSize.BOOL) && (s7Tag.getNumberOfElements() > 1)) { + numElements = (s7Tag.getNumberOfElements() + 7) / 8; + transportSize = TransportSize.BYTE; } if (transportSize.getCode() == 0x00) { numElements = numElements * transportSize.getSizeInBytes(); diff --git a/plc4j/drivers/s7/src/main/java/org/apache/plc4x/java/s7light/readwrite/optimizer/S7BlockReadOptimizer.java b/plc4j/drivers/s7/src/main/java/org/apache/plc4x/java/s7light/readwrite/optimizer/S7BlockReadOptimizer.java index e6d0119b76..41422a2ad6 100644 --- a/plc4j/drivers/s7/src/main/java/org/apache/plc4x/java/s7light/readwrite/optimizer/S7BlockReadOptimizer.java +++ b/plc4j/drivers/s7/src/main/java/org/apache/plc4x/java/s7light/readwrite/optimizer/S7BlockReadOptimizer.java @@ -37,6 +37,7 @@ import org.apache.plc4x.java.spi.messages.utils.DefaultPlcTagItem; import 
org.apache.plc4x.java.spi.messages.utils.PlcResponseItem; import org.apache.plc4x.java.spi.messages.utils.PlcTagItem; import org.apache.plc4x.java.spi.values.DefaultPlcValueHandler; +import org.apache.plc4x.java.spi.values.PlcBOOL; import org.apache.plc4x.java.spi.values.PlcNull; import org.apache.plc4x.java.spi.values.PlcRawByteArray; import org.slf4j.Logger; @@ -68,7 +69,7 @@ public class S7BlockReadOptimizer extends S7Optimizer { } // Sort the tags by area - // (We can only read multiple tags in one byte array, if they are located in the same area) + // (We can only read multiple tags in one byte array if they are located in the same area) Map<String, Map<PlcTag, String>> sortedTagsPerArea = new HashMap<>(); for (String tagName : readRequest.getTagNames()) { PlcTag tag = readRequest.getTag(tagName); @@ -85,7 +86,7 @@ public class S7BlockReadOptimizer extends S7Optimizer { } else if(tag instanceof S7Tag) { S7Tag s7Tag = (S7Tag) tag; MemoryArea memoryArea = s7Tag.getMemoryArea(); - // When reading DATA_BLOCKS we need to also use the block number. + // When reading DATA_BLOCKS, we need to also use the block number. String areaName = memoryArea.getShortName(); if(memoryArea == MemoryArea.DATA_BLOCKS) { areaName += s7Tag.getBlockNumber(); @@ -122,7 +123,7 @@ public class S7BlockReadOptimizer extends S7Optimizer { // TODO: Implement the size optimizedTagMap.put(new TagNameSize(tagList.get(plcTag), 0), new DefaultPlcTagItem<>(plcTag)); } - // Var-length strings, are a performance nightmare. Trying to optimize reading them is probably not + // Var-length strings are a performance nightmare. Trying to optimize reading them is probably not // worth the effort. For now, we simply handle them as un-chunked tags. else if(plcTag instanceof S7StringVarLengthTag) { // A var-length string tag simply reads 2 or 4 bytes. 
@@ -135,8 +136,11 @@ public class S7BlockReadOptimizer extends S7Optimizer { else if (plcTag instanceof S7Tag) { S7Tag s7Tag = (S7Tag) plcTag; - int curTagSize = s7Tag.getDataType().getSizeInBytes() * s7Tag.getNumberOfElements(); - // In case of fixed length strings, a string starts with two bytes: max length, + // If the dataType is BOOL, the size in bytes needs to be calculated differently. + // Especially if it's more than one element. + int curTagSize = s7Tag.getDataType() == TransportSize.BOOL ? (s7Tag.getNumberOfElements() + 7) / 8 : + s7Tag.getDataType().getSizeInBytes() * s7Tag.getNumberOfElements(); + // In the case of fixed length strings, a string starts with two bytes: max length, // actual length and then the string bytes after that. if(s7Tag instanceof S7StringFixedLengthTag) { S7StringFixedLengthTag stringFixedLengthTag = (S7StringFixedLengthTag) s7Tag; @@ -144,14 +148,14 @@ public class S7BlockReadOptimizer extends S7Optimizer { curTagSize = ((2 * bytesPerChar) + (stringFixedLengthTag.getStringLength() * bytesPerChar)) * s7Tag.getNumberOfElements(); } - // If this is the first tag, use that as starting point. + // If this is the first tag, use that as a starting point. if(currentMemoryArea == null) { currentMemoryArea = s7Tag.getMemoryArea(); currentDataBlockNumber = s7Tag.getBlockNumber(); currentChunkStartByteOffset = s7Tag.getByteOffset(); currentChunkEndByteOffset = s7Tag.getByteOffset() + curTagSize; } - // If the next tag would be more bytes away than a s7 address item requires, it's cheaper to + // If the next tag is more bytes away than a s7 address item requires, it cost fewer resources to // split up into multiple items. else if(currentChunkEndByteOffset + S7_ADDRESS_ANY_SIZE < s7Tag.getByteOffset()) { // Save the current chunk. @@ -169,7 +173,8 @@ public class S7BlockReadOptimizer extends S7Optimizer { } // Otherwise extend the array size to include this tag. 
else { - currentChunkEndByteOffset = s7Tag.getByteOffset() + curTagSize; + // Check if adding this tag would increase the size of the array. + currentChunkEndByteOffset = Math.max(currentChunkEndByteOffset, s7Tag.getByteOffset() + curTagSize); } // Add the tag to the list of tags for the current chunk. @@ -187,7 +192,7 @@ public class S7BlockReadOptimizer extends S7Optimizer { } // Go through all chunks. If there are ones larger than the max PDU size, split them up into - // multiple tags, that utilize the packets to the maximum. + // multiple tags that use the packets to the maximum. final int maxRequestSize = ((S7DriverContext) driverContext).getPduSize() - (EMPTY_READ_RESPONSE_SIZE + 4); Map<TagNameSize, PlcTagItem<PlcTag>> optimizedTagMap2 = new TreeMap<>(); for (TagNameSize tagNameSize : optimizedTagMap.keySet()) { @@ -224,17 +229,17 @@ public class S7BlockReadOptimizer extends S7Optimizer { } } - // Using the First Fit Decreasing (FFD) bin-packing algorithm try to find the ideal - // packing for utilizing request sizes. + // Using the First Fit Decreasing (FFD) bin-packing algorithm, try to find the ideal + // packing for using request sizes. // 1. Assign a size to each tag // 2. Sort the tags by size (biggest first) - // 3. Repeat this, till all tags are consumed + // 3. Repeat this till all tags are consumed // 1. Take the first packet of the list // 2. If the tag itself exceeds the max request size, keep on splitting it into chunks until - // the rest would fit into a request. Then proceed with the rest as if it was a normal tag + // the rest fit into a request. Then proceed with the rest as if it was a normal tag // 2. Go through the existing list of requests and check if the current tag would fit // 1. If it fits, add it to the request - // 2. If it doesn't fit go to the next request and check + // 2. If it doesn't fit, go to the next request and check // 3. 
If you reach the end, and it didn't fit any of the previous requests, add a new one LinkedHashMap<String, PlcTagItem<PlcTag>> executableTagMap = new LinkedHashMap<>(); for (TagNameSize tagNameSize : optimizedTagMap2.keySet()) { @@ -265,7 +270,7 @@ public class S7BlockReadOptimizer extends S7Optimizer { // Have the upstream optimizer handle its thing. PlcReadResponse rawReadResponse = super.processReadResponses(new DefaultPlcReadRequest(((DefaultPlcReadRequest) readRequest).getReader(), tags), readResponses, driverContext); - // Merge together split-up chunks. + // Merge split-up chunks. LinkedHashMap<String, PlcTagItem<PlcTag>> mergedTagItems = new LinkedHashMap<>(); Map<String, PlcResponseItem<PlcValue>> mergedValues = new LinkedHashMap<>(); for (String tagName : rawReadResponse.getTagNames()) { @@ -307,7 +312,7 @@ public class S7BlockReadOptimizer extends S7Optimizer { mergedValues.put(tagBaseName, new DefaultPlcResponseItem<>(PlcResponseCode.OK, new PlcRawByteArray(chunkData))); } } - // All others are just un-split chunks + // All others are just unsplit chunks else { PlcResponseCode responseCode = rawReadResponse.getResponseCode(tagName); PlcValue plcValue = rawReadResponse.getPlcValue(tagName); @@ -317,7 +322,7 @@ public class S7BlockReadOptimizer extends S7Optimizer { } PlcReadResponse mergedReadResponse = new DefaultPlcReadResponse(new DefaultPlcReadRequest(((DefaultPlcReadRequest)rawReadResponse.getRequest()).getReader(), mergedTagItems), mergedValues); - // If a Tag is a normal tag, just copy it over. However, if it's a S7TagChunk, process it. + // If a Tag is a normal tag, just copy it over. However, if it's an S7TagChunk, process it. 
Map<String, PlcResponseItem<PlcValue>> values = new HashMap<>(); for (String tagName : mergedReadResponse.getTagNames()) { PlcResponseCode responseCode = mergedReadResponse.getResponseCode(tagName); @@ -341,7 +346,8 @@ public class S7BlockReadOptimizer extends S7Optimizer { S7Tag s7Tag = (S7Tag) plcTag; String curTagName = s7TagChunk.getChunkTags().get(plcTag); int curTagStartPosition = s7Tag.getByteOffset() - chunkByteOffset; - int curTagDataSize = s7Tag.getDataType().getSizeInBytes() * s7Tag.getNumberOfElements(); + int curTagDataSize = s7Tag.getDataType() == TransportSize.BOOL ? (s7Tag.getNumberOfElements() + 7) / 8 : + s7Tag.getDataType().getSizeInBytes() * s7Tag.getNumberOfElements(); if(s7Tag instanceof S7StringFixedLengthTag) { S7StringFixedLengthTag s7StringFixedLengthTag = (S7StringFixedLengthTag) s7Tag; if(s7Tag.getDataType() == TransportSize.WSTRING) { @@ -371,13 +377,16 @@ public class S7BlockReadOptimizer extends S7Optimizer { } return s7Tag1.getSzlId() - s7Tag2.getSzlId(); } else if (tag1 instanceof S7ClkTag) { - // Technically CLK tags should be identical as there's + // Technically, CLK tags should be identical as there's // only one address for reading the PLC clock information. return 0; } else if (tag1 instanceof S7Tag) { S7Tag s7Tag1 = (S7Tag) tag1; S7Tag s7Tag2 = (S7Tag) tag2; if (s7Tag1.getByteOffset() == s7Tag2.getByteOffset()) { + if (s7Tag1.getBitOffset() == s7Tag2.getBitOffset()) { + return s7Tag1.getNumberOfElements() - s7Tag2.getNumberOfElements(); + } return s7Tag1.getBitOffset() - s7Tag2.getBitOffset(); } return s7Tag1.getByteOffset() - s7Tag2.getByteOffset(); @@ -391,25 +400,44 @@ public class S7BlockReadOptimizer extends S7Optimizer { try { int stringLength = (tag instanceof S7StringFixedLengthTag) ? 
((S7StringFixedLengthTag) tag).getStringLength() : 254; if (tag.getNumberOfElements() == 1) { - return DataItem.staticParse(readBuffer, tag.getDataType().getDataProtocolId(), - s7DriverContext.getControllerType(), stringLength); + // If a boolean is being read, we need to manually parse it as we are reading bytes and not single bits. + if(tag.getDataType() == TransportSize.BOOL) { + boolean bitValue = ((data[0] >> tag.getBitOffset()) & 0x01) != 0; + return PlcBOOL.of(bitValue); + } else { + return DataItem.staticParse(readBuffer, tag.getDataType().getDataProtocolId(), + s7DriverContext.getControllerType(), stringLength); + } } else { // In case of reading an array of bytes, make use of our simpler PlcRawByteArray as the user is // probably expecting to process the read raw data. if(tag.getDataType() == TransportSize.BYTE) { return new PlcRawByteArray(data); } else { - // Fetch all - final PlcValue[] resultItems = IntStream.range(0, tag.getNumberOfElements()).mapToObj(i -> { - try { - return DataItem.staticParse(readBuffer, tag.getDataType().getDataProtocolId(), - s7DriverContext.getControllerType(), stringLength); - } catch (ParseException e) { - logger.warn("Error parsing tag item of type: '{}' (at position {}})", tag.getDataType().name(), i, e); - } - return null; - }).toArray(PlcValue[]::new); - return DefaultPlcValueHandler.of(tag, resultItems); + // If a boolean is being read, we need to manually parse it as we are reading bytes and not single bits. 
+ if(tag.getDataType() == TransportSize.BOOL) { + int rootBitOffset = tag.getBitOffset(); + final PlcValue[] resultItems = IntStream.range(0, tag.getNumberOfElements()).mapToObj(i -> { + int bitOffset = rootBitOffset + i; + int byteOffset = bitOffset / 8; + bitOffset = bitOffset % 8; + boolean bitValue = ((data[byteOffset] >> bitOffset) & 0x01) != 0; + return PlcBOOL.of(bitValue); + }).toArray(PlcValue[]::new); + return DefaultPlcValueHandler.of(tag, resultItems); + } else { + // Fetch all + final PlcValue[] resultItems = IntStream.range(0, tag.getNumberOfElements()).mapToObj(i -> { + try { + return DataItem.staticParse(readBuffer, tag.getDataType().getDataProtocolId(), + s7DriverContext.getControllerType(), stringLength); + } catch (ParseException e) { + logger.warn("Error parsing tag item of type: '{}' (at position {}})", tag.getDataType().name(), i, e); + } + return null; + }).toArray(PlcValue[]::new); + return DefaultPlcValueHandler.of(tag, resultItems); + } } } } catch (ParseException e) { diff --git a/plc4j/drivers/s7/src/main/java/org/apache/plc4x/java/s7light/readwrite/protocol/S7ProtocolLogic.java b/plc4j/drivers/s7/src/main/java/org/apache/plc4x/java/s7light/readwrite/protocol/S7ProtocolLogic.java index 939c2d97b2..357e7ea02a 100644 --- a/plc4j/drivers/s7/src/main/java/org/apache/plc4x/java/s7light/readwrite/protocol/S7ProtocolLogic.java +++ b/plc4j/drivers/s7/src/main/java/org/apache/plc4x/java/s7light/readwrite/protocol/S7ProtocolLogic.java @@ -608,7 +608,7 @@ public class S7ProtocolLogic extends Plc4xProtocolBase<TPKTPacket> { short errorClass; short errorCode; - S7ParameterUserDataItemCPUFunctions parameteritem; + S7ParameterUserDataItemCPUFunctions parameterItem; if (responseMessage instanceof S7MessageResponseData) { S7MessageResponseData messageResponseData = (S7MessageResponseData) responseMessage; errorClass = messageResponseData.getErrorClass(); @@ -620,9 +620,9 @@ public class S7ProtocolLogic extends Plc4xProtocolBase<TPKTPacket> { } else if 
(responseMessage instanceof S7MessageUserData) { S7MessageUserData messageResponse = (S7MessageUserData) responseMessage; S7ParameterUserData parameters = (S7ParameterUserData) messageResponse.getParameter(); - parameteritem = (S7ParameterUserDataItemCPUFunctions) parameters.getItems().get(0); + parameterItem = (S7ParameterUserDataItemCPUFunctions) parameters.getItems().get(0); errorClass = 0; - errorCode = parameteritem.getErrorCode().shortValue(); + errorCode = parameterItem.getErrorCode().shortValue(); } else { throw new PlcProtocolException("Unsupported message type " + responseMessage.getClass().getName()); } @@ -665,7 +665,7 @@ public class S7ProtocolLogic extends Plc4xProtocolBase<TPKTPacket> { // If the numbers of items don't match, we're in big trouble as the only // way to know how to interpret the responses is by aligning them with the - // items from the request as this information is not returned by the PLC. + // items from the request as the PLC does not return this information. 
if (plcReadRequest.getNumberOfTags() != payload.getItems().size()) { throw new PlcProtocolException( "The number of requested items doesn't match the number of returned items"); @@ -781,6 +781,25 @@ public class S7ProtocolLogic extends Plc4xProtocolBase<TPKTPacket> { if((tag.getDataType() == TransportSize.BYTE) && (tag.getNumberOfElements() > 1)) { byteBuffer = ByteBuffer.allocate(tag.getNumberOfElements()); byteBuffer.put(plcValue.getRaw()); + } else if((tag.getDataType() == TransportSize.BOOL) && (tag.getNumberOfElements() > 1)) { + if(!(plcValue instanceof PlcList)) { + throw new PlcRuntimeException(String.format("Expected a PlcList with %d PlcBOOL elements", tag.getNumberOfElements())); + } + PlcList plcList = (PlcList) plcValue; + int numBytes = (tag.getNumberOfElements() + 7) / 8; + byteBuffer = ByteBuffer.allocate(numBytes); + for (int i = 0; i < tag.getNumberOfElements(); i++) { + if(!(plcList.getIndex(i) instanceof PlcBOOL)) { + throw new PlcRuntimeException(String.format("Expected a PlcList with %d PlcBOOL elements", tag.getNumberOfElements())); + } + PlcBOOL plcBOOL = (PlcBOOL) plcList.getIndex(i); + if(plcBOOL.getBoolean()) { + int curByte = i / 8; + int curBit = i % 8; + byteBuffer.put(curByte, (byte) (1 << curBit | byteBuffer.get(curByte))); + } + } + transportSize = DataTransportSize.BYTE_WORD_DWORD; } else { for (int i = 0; i < tag.getNumberOfElements(); i++) { int lengthInBits = DataItem.getLengthInBits(plcValue.getIndex(i), tag.getDataType().getDataProtocolId(), s7DriverContext.getControllerType(), stringLength); @@ -900,7 +919,7 @@ public class S7ProtocolLogic extends Plc4xProtocolBase<TPKTPacket> { } /** - * Currently we only support the S7 Any type of addresses. This helper simply converts the S7Tag + * Currently, we only support the S7 Any type of addresses. This helper simply converts the S7Tag * from PLC4X into S7Address objects. 
* * @param tag S7Tag instance we need to convert into an S7Address @@ -923,6 +942,9 @@ public class S7ProtocolLogic extends Plc4xProtocolBase<TPKTPacket> { transportSize = TransportSize.CHAR; int stringLength = (s7Tag instanceof S7StringFixedLengthTag) ? ((S7StringFixedLengthTag) s7Tag).getStringLength() : 254; numElements = numElements * (stringLength + 2) * 2; + } else if ((transportSize == TransportSize.BOOL) && (s7Tag.getNumberOfElements() > 1)) { + numElements = (s7Tag.getNumberOfElements() + 7) / 8; + transportSize = TransportSize.BYTE; } if (transportSize.getCode() == 0x00) { numElements = numElements * transportSize.getSizeInBytes(); diff --git a/plc4j/drivers/s7/src/test/java/org/apache/plc4x/java/s7/readwrite/ManualS7CounterTest.java b/plc4j/drivers/s7/src/test/java/org/apache/plc4x/java/s7/readwrite/ManualS7CounterTest.java index 917a380c62..1ebadc9c77 100644 --- a/plc4j/drivers/s7/src/test/java/org/apache/plc4x/java/s7/readwrite/ManualS7CounterTest.java +++ b/plc4j/drivers/s7/src/test/java/org/apache/plc4x/java/s7/readwrite/ManualS7CounterTest.java @@ -52,9 +52,9 @@ public class ManualS7CounterTest { writeBuilder.addTagAddress("counter-3", "%C18:COUNTER", Integer.decode("0x0012")); final PlcWriteRequest writeRequest = writeBuilder.build(); - final PlcWriteResponse writeResponse = writeRequest.execute().get(); + final PlcWriteResponse writeResposne = writeRequest.execute().get(); - if ( writeResponse.getResponseCode("counter-3") == PlcResponseCode.OK ){ + if ( writeResposne.getResponseCode("counter-3") == PlcResponseCode.OK ){ System.out.println("Write the counter"); } else { System.out.println("Problems...."); diff --git a/plc4j/spi/src/main/java/org/apache/plc4x/java/spi/values/PlcRawByteArray.java b/plc4j/spi/src/main/java/org/apache/plc4x/java/spi/values/PlcRawByteArray.java index 9cf8fd096d..6de9765c35 100644 --- a/plc4j/spi/src/main/java/org/apache/plc4x/java/spi/values/PlcRawByteArray.java +++ 
b/plc4j/spi/src/main/java/org/apache/plc4x/java/spi/values/PlcRawByteArray.java @@ -20,9 +20,13 @@ package org.apache.plc4x.java.spi.values; import org.apache.commons.codec.binary.Hex; import org.apache.plc4x.java.api.types.PlcValueType; +import org.apache.plc4x.java.api.value.PlcValue; import org.apache.plc4x.java.spi.generation.SerializationException; import org.apache.plc4x.java.spi.generation.WriteBuffer; +import java.util.ArrayList; +import java.util.List; + public class PlcRawByteArray extends PlcIECValue<byte[]> { public static PlcRawByteArray of(Object value) { @@ -59,4 +63,18 @@ public class PlcRawByteArray extends PlcIECValue<byte[]> { writeBuffer.writeByteArray(getClass().getSimpleName(), value); } + @Override + public boolean isList() { + return true; + } + + @Override + public List<PlcValue> getList() { + List<PlcValue> shortList = new ArrayList<>(value.length); + for (byte b : value) { + shortList.add(new PlcSINT((short) b)); + } + return shortList; + } + } diff --git a/plc4j/tools/connection-cache/src/main/java/org/apache/plc4x/java/utils/cache/CachedPlcConnectionManager.java b/plc4j/tools/connection-cache/src/main/java/org/apache/plc4x/java/utils/cache/CachedPlcConnectionManager.java index 0c1ea133c8..67ea13b3db 100644 --- a/plc4j/tools/connection-cache/src/main/java/org/apache/plc4x/java/utils/cache/CachedPlcConnectionManager.java +++ b/plc4j/tools/connection-cache/src/main/java/org/apache/plc4x/java/utils/cache/CachedPlcConnectionManager.java @@ -79,6 +79,10 @@ public class CachedPlcConnectionManager implements PlcConnectionManager, AutoClo */ public void removeCachedConnection(String url) { synchronized (connectionContainers) { + // Make sure the connection is closed before removing it. 
+ if(connectionContainers.containsKey(url)) { + connectionContainers.get(url).close(); + } connectionContainers.remove(url); } } diff --git a/plc4j/tools/connection-cache/src/main/java/org/apache/plc4x/java/utils/cache/ConnectionContainer.java b/plc4j/tools/connection-cache/src/main/java/org/apache/plc4x/java/utils/cache/ConnectionContainer.java index a64995a99b..a40e722699 100644 --- a/plc4j/tools/connection-cache/src/main/java/org/apache/plc4x/java/utils/cache/ConnectionContainer.java +++ b/plc4j/tools/connection-cache/src/main/java/org/apache/plc4x/java/utils/cache/ConnectionContainer.java @@ -71,6 +71,12 @@ class ConnectionContainer { // Clear the queue. queue.clear(); + // Stop the idle timer. + if(idleTimer != null) { + idleTimer.cancel(); + idleTimer.purge(); + } + // If the connection is currently used, close it. if(leasedConnection != null) { try { diff --git a/pom.xml b/pom.xml index d2a5cda413..176978c08f 100644 --- a/pom.xml +++ b/pom.xml @@ -104,15 +104,15 @@ <antlr.version>4.13.2</antlr.version> <apiguardian.version>1.1.2</apiguardian.version> - <assertj.version>3.27.3</assertj.version> + <assertj.version>3.27.4</assertj.version> <awaitility.version>4.3.0</awaitility.version> <!-- The currently available version is 2.0.5 and anything above 1.4.3 is no longer compatible with our code --> <bit-io.version>1.4.3</bit-io.version> <bouncycastle.version>1.81</bouncycastle.version> - <byte-buddy.version>1.17.6</byte-buddy.version> + <byte-buddy.version>1.17.7</byte-buddy.version> <cmake.version>3.31.5-b1</cmake.version> <commons-beanutils.version>1.9.4</commons-beanutils.version> - <commons-cli.version>1.9.0</commons-cli.version> + <commons-cli.version>1.10.0</commons-cli.version> <commons-codec.version>1.19.0</commons-codec.version> <commons-collections4.version>4.5.0</commons-collections4.version> <commons-compress.version>1.28.0</commons-compress.version> @@ -121,7 +121,7 @@ <commons-lang3.version>3.18.0</commons-lang3.version> 
<commons-logging.version>1.3.5</commons-logging.version> <commons-math3.version>3.6.1</commons-math3.version> - <commons-net.version>3.11.1</commons-net.version> + <commons-net.version>3.12.0</commons-net.version> <commons-text.version>1.14.0</commons-text.version> <commons-csv.version>1.14.1</commons-csv.version> <dom4j.version>2.2.0</dom4j.version> @@ -147,7 +147,7 @@ <logback.version>1.5.18</logback.version> <maven.clean.plugin.version>3.2.0</maven.clean.plugin.version> <milo.version>0.6.16</milo.version> - <mockito.version>5.18.0</mockito.version> + <mockito.version>5.19.0</mockito.version> <!-- Netty 4.2.x seems to break the serial transports --> <netty.version>4.1.123.Final</netty.version> <pcap4j.version>1.8.2</pcap4j.version> @@ -1010,7 +1010,7 @@ <plugin> <groupId>org.apache.karaf.tooling</groupId> <artifactId>karaf-maven-plugin</artifactId> - <version>4.4.7</version> + <version>4.4.8</version> </plugin> <plugin> @@ -1183,7 +1183,7 @@ <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> - <version>3.11.2</version> + <version>3.11.3</version> <configuration> <!-- This will suppress the generation of a hidden timestamp at the top of each generated html page diff --git a/website/antora-playbook.yml b/website/antora-playbook.yml index 2e0518fb7b..bed7199151 100644 --- a/website/antora-playbook.yml +++ b/website/antora-playbook.yml @@ -24,10 +24,13 @@ content: branches: HEAD edit_url: '{web_url}/edit/develop/{path}' start_path: website/asciidoc + - url: https://github.com/apache/plc4x.git + branches: ['rel/0.13'] + start_path: website/asciidoc - url: https://github.com/apache/plc4x.git branches: ['rel/0.12'] start_path: website/asciidoc -ui: +ui: bundle: url: https://gitlab.com/antora/antora-ui-default/-/jobs/artifacts/HEAD/raw/build/ui-bundle.zip?job=bundle-stable snapshot: true diff --git a/website/asciidoc/modules/developers/pages/preparing/linux.adoc 
b/website/asciidoc/modules/developers/pages/preparing/linux.adoc index ef335818f7..9d2cd69183 100644 --- a/website/asciidoc/modules/developers/pages/preparing/linux.adoc +++ b/website/asciidoc/modules/developers/pages/preparing/linux.adoc @@ -24,7 +24,7 @@ As tracking down issues which result from missing or outdated third party tools If any of the prerequisites are not met, the build fails with an error message that will help finding out what's wrong. The number of checks is highly dependent on which profiles are enabled. -So for example the availability and version of the C compiler is only checked if the `with-c` profile is enabled. +So for example, the availability and version of the C compiler is only checked if the `with-c` profile is enabled. If the check is reporting any issues, please feel free to follow the corresponding steps in this guide to install the tools. @@ -40,7 +40,7 @@ Apt-based systems: sudo apt install git -Yum based systems: +Yum-based systems: sudo yum install git @@ -50,9 +50,9 @@ Checking: java --version -If you get successful output indicating at least Java 11, you don't need to do anything. +If you get output successfully indicating at least Java 11, you don't need to do anything. -Apt based systems: +Apt-based systems: sudo apt install openjdk-21-jdk @@ -66,20 +66,21 @@ Apt-based systems: sudo apt install libpcap-dev -Yum based systems: +Yum-based systems: sudo yum install libpcap-devel -Using libpcap usually requires root privileges, however the java process can be granted permission to do so by executing the following command. +Using libpcap usually requires root privileges. +However, the java process can be granted permission to do so by executing the following command. sudo setcap cap_net_raw,cap_net_admin=eip /path/to/java `/path/to/java` however needs to be a real file and not a symlink. 
-So on my Ubuntu 22.04 system, where I installed java as described above the command is as follows: +So on my Ubuntu 22.04 system, where I installed java as described above, the command is as follows:      sudo setcap cap_net_raw,cap_net_admin=eip /usr/lib/jvm/java-21-openjdk-arm64/bin/java  -Sometimes I had to set the uid to execute the java executable with the permissions of the owner (root) ... however this is quite suboptimal solution. +Sometimes I had to set the uid to execute the java executable with the permissions of the owner (root) ... However, this is quite a suboptimal solution.  sudo chmod 4755 /path/to/java  @@ -97,7 +98,7 @@ Apt-based systems:      sudo apt install gcc  -Yum based systems: +Yum-based systems:      sudo yum install gcc  @@ -113,11 +114,11 @@ Microsoft has documented what is needed to install `.Net SDK` https://dotnet.mic For Ubuntu this would be:      - wget -q https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb - sudo dpkg -i packages-microsoft-prod.deb - sudo apt install apt-transport-https + sudo mkdir -p /etc/apt/keyrings + wget https://packages.microsoft.com/keys/microsoft.asc -O /etc/apt/keyrings/microsoft.asc + echo "deb [arch=amd64 signed-by=/etc/apt/keyrings/microsoft.asc] https://packages.microsoft.com/ubuntu/24.04/prod noble main" | sudo tee /etc/apt/sources.list.d/microsoft-dotnet.list sudo apt update - sudo apt install dotnet-sdk-6.0 + sudo apt install dotnet-sdk-8.0  For other distributions, please select the distribution in the drop-down menu and follow the instructions there. 
@@ -139,7 +140,7 @@ Apt-based systems:      sudo apt install python3  -Yum based systems: +Yum-based systems:      yum intall python3  diff --git a/website/asciidoc/modules/developers/pages/preparing/macos.adoc b/website/asciidoc/modules/developers/pages/preparing/macos.adoc index a26c970f80..742fabc83e 100644 --- a/website/asciidoc/modules/developers/pages/preparing/macos.adoc +++ b/website/asciidoc/modules/developers/pages/preparing/macos.adoc @@ -28,7 +28,7 @@ So for example the availability and version of the C compiler is only checked if  Most libraries on Mac are installed using `Homebrew`.  -Make sure `Homebrew` ist installed in order to install most of these. +Make sure `Homebrew` is installed to install most of these.      /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"  @@ -41,7 +41,7 @@ Checking:  If you get any successful output, you probably don't need to do anything. If you are running this command on a really fresh and clean system, it might be that you are asked to finish installing the XCode commandline tools. -If this happens, just follow the process and re-try after it has finished. +If this happens, follow the process and re-try after it has finished.  If you get a response that indicates that git needs to be installed, please execute the following command:  @@ -91,9 +91,9 @@ If you get any successful output, you probably don't need to do anything.  Microsoft is offering an installer for macOS which is available from https://dotnet.microsoft.com/download/dotnet-core/2.2[here].  -Alternatively you can also install it via homebrew: +Alternatively, you can also install it via homebrew:  - brew install --cask dotnet-sdk + brew install --cask dotnet-sdk8  === python (For PLC4Py)  @@ -103,7 +103,7 @@ Checking:  If you get a version of 3.0.0 or higher reported, you probably don't need to do anything. 
-In order to install it, please execute the following command: +To install it, please execute the following command:      brew install pyenv  diff --git a/website/asciidoc/modules/developers/pages/preparing/windows.adoc b/website/asciidoc/modules/developers/pages/preparing/windows.adoc index c050f722d1..736e9a301a 100644 --- a/website/asciidoc/modules/developers/pages/preparing/windows.adoc +++ b/website/asciidoc/modules/developers/pages/preparing/windows.adoc @@ -48,21 +48,23 @@ Checking:      java --version  -If you get successful output, check that the version reported is at least Java 11. -If you need to install or update Java, unfortunately this doesn't seem to be available via Chocolatey, but needs to be installed directly from a distribution. +If you get output successfully, check that the version reported is at least Java 11. +If you need to install or update Java: +Unfortunately, this doesn't seem to be available via Chocolatey, but needs to be installed directly from a distribution.  For Oracles OpenJDK 21 this would be from here: https://learn.microsoft.com/de-de/java/openjdk/download  -If you are using a Windows VM on `aarch64` (Apple M1 or M2 virtual machine), the download available from Microsoft build seem to be one of the few options you have. When installing make sure to select the option to configure the "JAVA_HOME" environment variable (deactivated per default). +If you are using a Windows VM on `aarch64` (Apple M1 or M2 virtual machine), the download available from Microsoft build seems to be one of the few options you have. +When installing, make sure to select the option to configure the "JAVA_HOME" environment variable (deactivated per default).  == Optional and other language support  -Git an Java should be all you need for building the Java part of PLC4X. +Git and Java should be all you need for building the Java part of PLC4X. 
=== LibPCAP (For raw-ethernet support)  -In order to use the raw ethernet transport capabilities of PLC4X, we need to ensure the NPcap library is installed. +To use the raw ethernet transport capabilities of PLC4X, we need to ensure the NPcap library is installed.  -In order to check if NPcap is installed, check the following directories: +To check if NPcap is installed, check the following directories:      64-bit versions:  diff --git a/website/asciidoc/modules/developers/pages/release/release.adoc b/website/asciidoc/modules/developers/pages/release/release.adoc index 6639a8972f..7994503b77 100644 --- a/website/asciidoc/modules/developers/pages/release/release.adoc +++ b/website/asciidoc/modules/developers/pages/release/release.adoc @@ -502,7 +502,7 @@ After this https://dist.apache.org/repos/dist/release/plc4x should only contain  Probably the simplest part is releasing the Maven artifacts.  -In order to do this, the release manager logs into Nexus at https://repository.apache.org/, selects the staging repository and clicks on the `Release` button. +To do this, the release manager logs into Nexus at https://repository.apache.org/, selects the staging repository and clicks on the `Release` button.  This will move all artifacts into the Apache release repository and delete the staging repository after that.  @@ -528,18 +528,13 @@ This has to be done with git  [subs="verbatim,attributes"] ---- git checkout release -git merge v{current-full-version} ----- - -When there are conflicts it could help to use the `theirs` merge strategy, i.e.,  -[subs="verbatim,attributes"] ---- git merge -X theirs v{current-full-version} ---  -Possibly a manual conflict resolution has to be done afterwards. After that, changes need to -be pushed. +When there are conflicts, the `theirs` merge strategy will most probably help resolve them.  + +Possibly a manual conflict resolution has to be done afterward. +After that, changes need to be pushed. 
== Updating GitHub Issues diff --git a/website/resources/plc4x-doap.rdf b/website/resources/plc4x-doap.rdf index d060ffdc5d..6cafe2e375 100644 --- a/website/resources/plc4x-doap.rdf +++ b/website/resources/plc4x-doap.rdf @@ -88,7 +88,14 @@ <release> <Version> - <name>Latest 0.12.0 release</name> + <name>Latest 0.13.0 release</name> + <created>2025-08-05</created> + <revision>0.13.0</revision> + </Version> + </release> + <release> + <Version> + <name>Legacy 0.12.0 release</name> <created>2024-02-19</created> <revision>0.12.0</revision> </Version>
