how about?
/*
 * Convert a string of octal digits to its unsigned integer value.
 *
 * Scans forward from the start of `oct`, folding in each digit in
 * the range '0'..'7'.  Conversion stops at the first character
 * outside that range (including the terminating NUL), so an empty
 * string — or one that starts with a non-octal character — yields 0.
 *
 * NOTE(review): no overflow detection; very long inputs silently wrap.
 */
unsigned oct2int(const char *oct)
{
  unsigned result = 0;

  for (;;)
  {
    int c = *oct++;

    if (c < '0' || c > '7')
      break;                          /* end of octal digit run */

    /* result = result * 8 + digit */
    result = (result << 3) + (unsigned)(c - '0');
  }

  return result;
}
Regards
-Bill Knight
R O SoftWare
On Wed, 28 Apr 2004 23:50:02 +0200, Robert Seczkowski wrote:
>Horse and saddle to the guy who finds what is wrong in the code below:
>unsigned int oct2int(const char* oct){
>unsigned int ret = 0;
>int i,j=0,k,length = strlen(oct);
>
> for(i=length-1;i >= 0;i--){
> if((k=(oct[i]-'0')) > 0) ret += 1<<(3*j) * k;//8 = 2^3
> j++;
> }
>return ret;
>}
>The compiler doesn't report an error. However, the code is not working as expected.
>To my surprise, the code disables interrupts forever.
>So the example should look like:
>unsigned int oct2int(const char* oct){
>unsigned int ret = 0;
>int i,j=0,k,length = strlen(oct);
>
> for(i=length-1;i >= 0;i--){
> if((k=(oct[i]-'0')) > 0) ret += 1<<(3*j) * k;//8 = 2^3
> j++;
> asm("eint");
> }
>return ret;
>}
>Who was so smart !!!
>Explanation below ( from assembly):
>unsigned int oct2int(const char* oct){
> 8c42: 0b 12 push r11 ;
> 8c44: 0a 12 push r10 ;
> 8c46: 09 12 push r9 ;
> 8c48: 08 12 push r8 ;
> 8c4a: 0c 4f mov r15, r12 ;
>unsigned int ret = 0;
> 8c4c: 0b 43 clr r11 ;
>int i,j=0,k,length = strlen(oct);
> 8c4e: 0a 43 clr r10 ;
> 8c50: 1f 83 dec r15 ;
> 8c52: 1f 53 inc r15 ;
> 8c54: cf 93 00 00 cmp.b #0, 0(r15) ;r3 As==00
> 8c58: fc 23 jnz $-6 ;abs 0x8c52
> 8c5a: 09 4f mov r15, r9 ;
> 8c5c: 09 8c sub r12, r9 ;
> for(i=length-1;i >= 0;i--){
> 8c5e: 0d 49 mov r9, r13 ;
> 8c60: 3d 53 add #-1, r13 ;r3 As==11
> 8c62: e7 38 jl $+464 ;abs 0x8e32
> 8c64: 18 43 mov #1, r8 ;r3 As==01
> 8c66: 0f 4d mov r13, r15 ;
> 8c68: 3f e3 inv r15 ;
> 8c6a: 0e 4f mov r15, r14 ;
> 8c6c: 3e f0 03 00 and #3, r14 ;#0x0003
> 8c70: 0f 49 mov r9, r15 ;
> 8c72: 2f 83 decd r15 ;
> 8c74: 42 38 jl $+134 ;abs 0x8cfa
> 8c76: 0e 93 cmp #0, r14 ;r3 As==00
> 8c78: 5f 24 jz $+192 ;abs 0x8d38
> 8c7a: 3e 90 03 00 cmp #3, r14 ;#0x0003
> 8c7e: 3d 34 jge $+124 ;abs 0x8cfa
> 8c80: 2e 93 cmp #2, r14 ;r3 As==10
> 8c82: 1d 34 jge $+60 ;abs 0x8cbe
> if((k=(oct[i]-'0')) > 0) ret += 1<<(3*j) * k;//8 = 2^3
> 8c84: 0f 4c mov r12, r15 ;
> 8c86: 0f 5d add r13, r15 ;
> 8c88: 6f 4f mov.b @r15, r15 ;
> 8c8a: 8f 11 sxt r15 ;
> 8c8c: 3f 50 d0 ff add #-48, r15 ;#0xffd0
> 8c90: 1f 93 cmp #1, r15 ;r3 As==01
> 8c92: 13 38 jl $+40 ;abs 0x8cba
> 8c94: 02 12 push r2 ;
> 8c96: 32 c2 dint
> 8c98: 03 43 nop
> 8c9a: 82 4b 32 01 mov r11, &0x0132 ;
> 8c9e: 82 4f 38 01 mov r15, &0x0138 ;
> 8ca2: 1e 42 3a 01 mov &0x013a,r14 ;0x013a
> 8ca6: 32 41 pop r2 ;
> 8ca8: 0f 4e mov r14, r15 ;
> 8caa: 0f 5f rla r15 ;
> 8cac: 0f 5e add r14, r15 ;
> 8cae: 0b 48 mov r8, r11 ;
> 8cb0: 0f 93 cmp #0, r15 ;r3 As==00
> 8cb2: 03 24 jz $+8 ;abs 0x8cba
> 8cb4: 0b 5b rla r11 ;
> 8cb6: 1f 83 dec r15 ;
> 8cb8: fd 23 jnz $-4 ;abs 0x8cb4
> j++;
> 8cba: 1a 53 inc r10 ;
> 8cbc: 3d 53 add #-1, r13 ;r3 As==11
> 8cbe: 0f 4c mov r12, r15 ;
> 8cc0: 0f 5d add r13, r15 ;
> 8cc2: 6f 4f mov.b @r15, r15 ;
> 8cc4: 8f 11 sxt r15 ;
> 8cc6: 3f 50 d0 ff add #-48, r15 ;#0xffd0
> 8cca: 1f 93 cmp #1, r15 ;r3 As==01
> 8ccc: 14 38 jl $+42 ;abs 0x8cf6
> 8cce: 02 12 push r2 ;
> 8cd0: 32 c2 dint
> 8cd2: 03 43 nop
> 8cd4: 82 4a 32 01 mov r10, &0x0132 ;
> 8cd8: 82 4f 38 01 mov r15, &0x0138 ;
> 8cdc: 1f 42 3a 01 mov &0x013a,r15 ;0x013a
> 8ce0: 32 41 pop r2 ;
> 8ce2: 0e 4f mov r15, r14 ;
> 8ce4: 0e 5e rla r14 ;
> 8ce6: 0e 5f add r15, r14 ;
> 8ce8: 0f 48 mov r8, r15 ;
> 8cea: 0e 93 cmp #0, r14 ;r3 As==00
> 8cec: 03 24 jz $+8 ;abs 0x8cf4
> 8cee: 0f 5f rla r15 ;
> 8cf0: 1e 83 dec r14 ;
> 8cf2: fd 23 jnz $-4 ;abs 0x8cee
> 8cf4: 0b 5f add r15, r11 ;
> 8cf6: 1a 53 inc r10 ;
> 8cf8: 3d 53 add #-1, r13 ;r3 As==11
> 8cfa: 0f 4c mov r12, r15 ;
> 8cfc: 0f 5d add r13, r15 ;
> 8cfe: 6f 4f mov.b @r15, r15 ;
> 8d00: 8f 11 sxt r15 ;
> 8d02: 3f 50 d0 ff add #-48, r15 ;#0xffd0
> 8d06: 1f 93 cmp #1, r15 ;r3 As==01
> 8d08: 14 38 jl $+42 ;abs 0x8d32
> 8d0a: 02 12 push r2 ;
> 8d0c: 32 c2 dint
> 8d0e: 03 43 nop
> 8d10: 82 4a 32 01 mov r10, &0x0132 ;
> 8d14: 82 4f 38 01 mov r15, &0x0138 ;
> 8d18: 1f 42 3a 01 mov &0x013a,r15 ;0x013a
> 8d1c: 32 41 pop r2 ;
> 8d1e: 0e 4f mov r15, r14 ;
> 8d20: 0e 5e rla r14 ;
> 8d22: 0e 5f add r15, r14 ;
> 8d24: 0f 48 mov r8, r15 ;
> 8d26: 0e 93 cmp #0, r14 ;r3 As==00
> 8d28: 03 24 jz $+8 ;abs 0x8d30
> 8d2a: 0f 5f rla r15 ;
> 8d2c: 1e 83 dec r14 ;
> 8d2e: fd 23 jnz $-4 ;abs 0x8d2a
> 8d30: 0b 5f add r15, r11 ;
> 8d32: 1a 53 inc r10 ;
> 8d34: 3d 53 add #-1, r13 ;r3 As==11
> 8d36: 7d 38 jl $+252 ;abs 0x8e32
> 8d38: 0f 4c mov r12, r15 ;
> 8d3a: 0f 5d add r13, r15 ;
> 8d3c: 6f 4f mov.b @r15, r15 ;
> 8d3e: 8f 11 sxt r15 ;
> 8d40: 3f 50 d0 ff add #-48, r15 ;#0xffd0
> 8d44: 1f 93 cmp #1, r15 ;r3 As==01
> 8d46: 14 38 jl $+42 ;abs 0x8d70
> 8d48: 02 12 push r2 ;
> 8d4a: 32 c2 dint
> 8d4c: 03 43 nop
> 8d4e: 82 4a 32 01 mov r10, &0x0132 ;
> 8d52: 82 4f 38 01 mov r15, &0x0138 ;
> 8d56: 1f 42 3a 01 mov &0x013a,r15 ;0x013a
> 8d5a: 32 41 pop r2 ;
> 8d5c: 0e 4f mov r15, r14 ;
> 8d5e: 0e 5e rla r14 ;
> 8d60: 0e 5f add r15, r14 ;
> 8d62: 0f 48 mov r8, r15 ;
> 8d64: 0e 93 cmp #0, r14 ;r3 As==00
> 8d66: 03 24 jz $+8 ;abs 0x8d6e
> 8d68: 0f 5f rla r15 ;
> 8d6a: 1e 83 dec r14 ;
> 8d6c: fd 23 jnz $-4 ;abs 0x8d68
> 8d6e: 0b 5f add r15, r11 ;
> 8d70: 0e 4a mov r10, r14 ;
> 8d72: 1e 53 inc r14 ;
> 8d74: 0f 4d mov r13, r15 ;
> 8d76: 0f 5c add r12, r15 ;
> 8d78: 5f 4f ff ff mov.b -1(r15),r15 ;
> 8d7c: 8f 11 sxt r15 ;
> 8d7e: 3f 50 d0 ff add #-48, r15 ;#0xffd0
> 8d82: 1f 93 cmp #1, r15 ;r3 As==01
> 8d84: 14 38 jl $+42 ;abs 0x8dae
> 8d86: 02 12 push r2 ;
> 8d88: 32 c2 dint
> 8d8a: 03 43 nop
> 8d8c: 82 4e 32 01 mov r14, &0x0132 ;
> 8d90: 82 4f 38 01 mov r15, &0x0138 ;
> 8d94: 1f 42 3a 01 mov &0x013a,r15 ;0x013a
> 8d98: 32 41 pop r2 ;
> 8d9a: 0e 4f mov r15, r14 ;
> 8d9c: 0e 5e rla r14 ;
> 8d9e: 0e 5f add r15, r14 ;
> 8da0: 0f 48 mov r8, r15 ;
> 8da2: 0e 93 cmp #0, r14 ;r3 As==00
> 8da4: 03 24 jz $+8 ;abs 0x8dac
> 8da6: 0f 5f rla r15 ;
> 8da8: 1e 83 dec r14 ;
> 8daa: fd 23 jnz $-4 ;abs 0x8da6
> 8dac: 0b 5f add r15, r11 ;
> 8dae: 0e 4a mov r10, r14 ;
> 8db0: 2e 53 incd r14 ;
> 8db2: 0f 4d mov r13, r15 ;
> 8db4: 0f 5c add r12, r15 ;
> 8db6: 5f 4f fe ff mov.b -2(r15),r15 ;
> 8dba: 8f 11 sxt r15 ;
> 8dbc: 3f 50 d0 ff add #-48, r15 ;#0xffd0
> 8dc0: 1f 93 cmp #1, r15 ;r3 As==01
> 8dc2: 14 38 jl $+42 ;abs 0x8dec
> 8dc4: 02 12 push r2 ;
> 8dc6: 32 c2 dint
> 8dc8: 03 43 nop
> 8dca: 82 4e 32 01 mov r14, &0x0132 ;
> 8dce: 82 4f 38 01 mov r15, &0x0138 ;
> 8dd2: 1f 42 3a 01 mov &0x013a,r15 ;0x013a
> 8dd6: 32 41 pop r2 ;
> 8dd8: 0e 4f mov r15, r14 ;
> 8dda: 0e 5e rla r14 ;
> 8ddc: 0e 5f add r15, r14 ;
> 8dde: 0f 48 mov r8, r15 ;
> 8de0: 0e 93 cmp #0, r14 ;r3 As==00
> 8de2: 03 24 jz $+8 ;abs 0x8dea
> 8de4: 0f 5f rla r15 ;
> 8de6: 1e 83 dec r14 ;
> 8de8: fd 23 jnz $-4 ;abs 0x8de4
> 8dea: 0b 5f add r15, r11 ;
> 8dec: 0e 4a mov r10, r14 ;
> 8dee: 3e 50 03 00 add #3, r14 ;#0x0003
> 8df2: 0f 4d mov r13, r15 ;
> 8df4: 0f 5c add r12, r15 ;
> 8df6: 5f 4f fd ff mov.b -3(r15),r15 ;
> 8dfa: 8f 11 sxt r15 ;
> 8dfc: 3f 50 d0 ff add #-48, r15 ;#0xffd0
> 8e00: 1f 93 cmp #1, r15 ;r3 As==01
> 8e02: 14 38 jl $+42 ;abs 0x8e2c
> 8e04: 02 12 push r2 ;
> 8e06: 32 c2 dint
> 8e08: 03 43 nop
> 8e0a: 82 4e 32 01 mov r14, &0x0132 ;
> 8e0e: 82 4f 38 01 mov r15, &0x0138 ;
> 8e12: 1f 42 3a 01 mov &0x013a,r15 ;0x013a
> 8e16: 32 41 pop r2 ;
> 8e18: 0e 4f mov r15, r14 ;
> 8e1a: 0e 5e rla r14 ;
> 8e1c: 0e 5f add r15, r14 ;
> 8e1e: 0f 48 mov r8, r15 ;
> 8e20: 0e 93 cmp #0, r14 ;r3 As==00
> 8e22: 03 24 jz $+8 ;abs 0x8e2a
> 8e24: 0f 5f rla r15 ;
> 8e26: 1e 83 dec r14 ;
> 8e28: fd 23 jnz $-4 ;abs 0x8e24
> 8e2a: 0b 5f add r15, r11 ;
> 8e2c: 2a 52 add #4, r10 ;r2 As==10
> 8e2e: 2d 82 sub #4, r13 ;r2 As==10
> 8e30: 83 37 jge $-248 ;abs 0x8d38
> }
>return ret;
>}
> 8e32: 0f 4b mov r11, r15 ;
> 8e34: 38 41 pop r8 ;
> 8e36: 39 41 pop r9 ;
> 8e38: 3a 41 pop r10 ;
> 8e3a: 3b 41 pop r11 ;
> 8e3c: 30 41 ret
>--
>------------------------
> Jeszcze lepsza oferta AlphaNet
> - nowe serwery wirtualne
> - nowy sklep internetowy
> - nowy wirtualny administrator
> Sprawdz www.alpha.pl
>------------------------