指针运算怎么不对?
using System;

namespace loong.csharp.learn.pointer
{
    /// <summary>
    /// Demonstrates pointer arithmetic: advancing an int* by 2 moves it
    /// 2 * sizeof(int) = 8 bytes; advancing a long* by 20000 moves it
    /// 20000 * sizeof(long) = 160000 bytes.
    /// </summary>
    unsafe class TestPointer
    {
        public static void Main()
        {
            int a = 10;
            long b = 2000;
            int* pA = &a;
            long* pB = &b;

            // Cast addresses to ulong, not uint: a uint cast truncates
            // pointers in a 64-bit process.
            Console.WriteLine("Address of {0} is {1:D}", a, (ulong)&a);
            Console.WriteLine("Address of {0} is {1:D}", b, (ulong)&b);

            // BUG FIX: print the pointer VALUE ((ulong)pA), not the address of
            // the pointer variable ((uint)&pA). &pA never changes, which is why
            // the original output showed the same address before and after
            // pA += 2 — it was always the location of pA itself, not where
            // pA points.
            Console.WriteLine("Address is {0:D} and value is {1}", (ulong)pA, *pA);
            Console.WriteLine("Address is {0:D} and value is {1}", (ulong)pB, *pB);

            // After these offsets the pointers no longer point at a or b, so
            // dereferencing them reads unrelated stack memory: the printed
            // "values" are meaningless garbage.
            pA += 2;     // +2 * sizeof(int)  = +8 bytes
            pB += 20000; // +20000 * sizeof(long) = +160000 bytes
            Console.WriteLine("Address is {0:D} and value is {1}", (ulong)pA, *pA);
            Console.WriteLine("Address is {0:D} and value is {1}", (ulong)pB, *pB);

            Console.ReadLine();
        }
    }
}
输出是:
address of 10 is 1242224
address of 2000 is 1242216
address is 1242212 and value is 10
address is 1242208 and value is 2000
address is 1242212 and value is 1429672
address is 1242208 and value is 2045194496
请问1429672 和 2045194496 是怎么计算出来的?
using System;

namespace loong.csharp.learn.pointer
{
    /// <summary>
    /// Demonstrates pointer arithmetic: advancing an int* by 2 moves it
    /// 2 * sizeof(int) = 8 bytes; advancing a long* by 20000 moves it
    /// 20000 * sizeof(long) = 160000 bytes.
    /// </summary>
    unsafe class TestPointer
    {
        public static void Main()
        {
            int a = 10;
            long b = 2000;
            int* pA = &a;
            long* pB = &b;

            // Cast addresses to ulong, not uint: a uint cast truncates
            // pointers in a 64-bit process.
            Console.WriteLine("Address of {0} is {1:D}", a, (ulong)&a);
            Console.WriteLine("Address of {0} is {1:D}", b, (ulong)&b);

            // BUG FIX: print the pointer VALUE ((ulong)pA), not the address of
            // the pointer variable ((uint)&pA). &pA never changes, which is why
            // the original output showed the same address before and after
            // pA += 2 — it was always the location of pA itself, not where
            // pA points.
            Console.WriteLine("Address is {0:D} and value is {1}", (ulong)pA, *pA);
            Console.WriteLine("Address is {0:D} and value is {1}", (ulong)pB, *pB);

            // After these offsets the pointers no longer point at a or b, so
            // dereferencing them reads unrelated stack memory: the printed
            // "values" are meaningless garbage.
            pA += 2;     // +2 * sizeof(int)  = +8 bytes
            pB += 20000; // +20000 * sizeof(long) = +160000 bytes
            Console.WriteLine("Address is {0:D} and value is {1}", (ulong)pA, *pA);
            Console.WriteLine("Address is {0:D} and value is {1}", (ulong)pB, *pB);

            Console.ReadLine();
        }
    }
}
输出是:
address of 10 is 1242224
address of 2000 is 1242216
address is 1242212 and value is 10
address is 1242208 and value is 2000
address is 1242212 and value is 1429672
address is 1242208 and value is 2045194496
请问1429672 和 2045194496 是怎么计算出来的?
address of 10 is 1242224
address of 2000 is 1242216
address is 1242212 and value is 10
address is 1242208 and value is 2000
address is 1242212 and value is 1429672
address is 1242208 and value is 2045194496

那两个数是 pA、pB 分别偏移 8 字节和 160000 字节之后所指内存里的无意义的数(垃圾值)。另外,代码打印的是 (uint)&amp;pA——指针变量自身的地址,它不会随 pA += 2 改变;想看偏移后的地址应打印 (uint)pA。