I have a std::string with a large integer encoded in Base16:
bbb91c1c95b656f386b19ab284b9c0f66598e7761cd71569734bb72b6a7153b77613a6cef8e63
e9bd9bb1e0e53a0fd8fa2162b160fcb7b461689afddf098bfc32300cf6808960127f1d9f0e287
f948257f7e0574b56585dd1efe1192d784b9c93f9c2215bd4867062ea30f034265374fa013ab4
5af06cd8554fd55f1c442c2ed
I want a std::string with the same large integer encoded in Base10:
13182363340585954094154991955162141609757130565683854218475776626603716062690
50741824486137510938646762753180989129520441058729412931959771922633699694948
46611764803267065720664398942078304585998290003537553345030144535441671492050
01138054588415687622649540474976282005406232907125282540703919964112809484362
9
How do I convert the strings from Base16 to Base10?
Since you don't want a library, you need some code. It's not a trivial problem, but not too complex either. Let's start with my Bignum class from another answer and add a couple of functions to it.
class Bignum
{
    //...

    Bignum& operator+=(int rhs)
    {
        assert(rhs >= 0 && rhs <= 999999999);
        uint32_t carry = rhs;
        for (size_t i = 0; i < parts.size(); i++)
        {
            uint32_t sum = parts[i] + carry;
            parts[i] = (uint32_t)(sum % 1000000000UL);
            carry = (uint32_t)(sum / 1000000000UL);
        }
        if (carry != 0)
            parts.push_back(carry);
        return *this;
    }

    void FromHex(const char* pString)
    {
        while (*pString != 0)
        {
            char ch = toupper(*pString++);
            assert((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'F'));
            int digit = (ch <= '9') ? (ch - '0') : (ch - 'A' + 10);
            *this *= 16;   // uses the existing operator*=(int)
            *this += digit;
        }
    }

    //...
};
See the whole thing in action: http://coliru.stacked-crooked.com/a/cb5061a00c945875
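The class above leans on members hidden behind the "//..." and the link: the parts limb vector, the operator*=(int) that FromHex calls, and something to print the result in Base10. Here is a minimal sketch of what those could look like, assuming parts is a little-endian std::vector<uint32_t> of base-10^9 limbs; the linked code may differ in detail, and ToString is a name I made up for this sketch.

// Hypothetical completion of the elided members; merge with the
// operator+= and FromHex shown above.
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <string>
#include <vector>

class Bignum
{
public:
    // operator+=(int) and FromHex(const char*) as shown above...

    Bignum& operator*=(int rhs)
    {
        assert(rhs >= 0 && rhs <= 999999999);
        uint64_t carry = 0;
        for (size_t i = 0; i < parts.size(); i++)
        {
            uint64_t product = (uint64_t)parts[i] * rhs + carry;
            parts[i] = (uint32_t)(product % 1000000000UL);
            carry = product / 1000000000UL;
        }
        while (carry != 0)
        {
            parts.push_back((uint32_t)(carry % 1000000000UL));
            carry /= 1000000000UL;
        }
        return *this;
    }

    std::string ToString() const
    {
        if (parts.empty())
            return "0";

        // Most significant limb prints as-is, the rest zero-padded to 9 digits.
        std::string result = std::to_string(parts.back());
        char buf[16];
        for (size_t i = parts.size() - 1; i-- > 0; )
        {
            snprintf(buf, sizeof(buf), "%09u", parts[i]);
            result += buf;
        }
        return result;
    }

private:
    std::vector<uint32_t> parts; // little-endian limbs, each in [0, 10^9)
};

With those members merged in, something like Bignum n; n.FromHex(s1.c_str()); followed by n.ToString() performs the whole Base16 to Base10 conversion.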
The following should work for you. Your previous question at Convert CryptoPP::Integer to LPCTSTR has the references.
#include <iostream>
#include <string>
using namespace std;

#include "cryptlib.h"
#include "integer.h"
using namespace CryptoPP;

int main(int argc, char* argv[])
{
    string s2, s1 =
        "bbb91c1c95b656f386b19ab284b9c0f66598e7761cd71569734bb72b6a7153b77613a6cef8e63"
        "e9bd9bb1e0e53a0fd8fa2162b160fcb7b461689afddf098bfc32300cf6808960127f1d9f0e287"
        "f948257f7e0574b56585dd1efe1192d784b9c93f9c2215bd4867062ea30f034265374fa013ab4"
        "5af06cd8554fd55f1c442c2ed";

    // Append 'h' to indicate Base16
    // Integer n((s1 + "h").c_str());

    // Prepend '0x' to indicate Base16
    Integer n(("0x" + s1).c_str());

    // Convert to Base10
    s2 = IntToString<Integer>(n, 10);
    cout << s2 << endl;

    return 0;
}
The code above shows you how to do it with C-style strings, which is easy to conceptualize. Another way to do it uses a Crypto++ Pipeline to convert the ASCII string into a big-endian array of bytes.
#include <iostream>
#include <string>
using namespace std;

#include "cryptlib.h"
#include "integer.h"
#include "filters.h"
#include "hex.h"
using namespace CryptoPP;

int main(int argc, char* argv[])
{
    string s3, s2, s1 =
        "bbb91c1c95b656f386b19ab284b9c0f66598e7761cd71569734bb72b6a7153b77613a6cef8e63"
        "e9bd9bb1e0e53a0fd8fa2162b160fcb7b461689afddf098bfc32300cf6808960127f1d9f0e287"
        "f948257f7e0574b56585dd1efe1192d784b9c93f9c2215bd4867062ea30f034265374fa013ab4"
        "5af06cd8554fd55f1c442c2ed";

    // Use a HexDecoder to convert to big-endian array
    StringSource ss(s1, true, new HexDecoder(new StringSink(s2)));

    // Use big-endian array to construct n
    Integer n((const byte*)s2.data(), s2.size());

    // Convert to Base10
    s3 = IntToString<Integer>(n, 10);
    cout << s3 << endl;

    return 0;
}
Here's another way to perform the conversion using a Crypto++ Pipeline.
#include <iostream>
#include <string>
using namespace std;

#include "cryptlib.h"
#include "integer.h"
#include "filters.h"
#include "hex.h"
using namespace CryptoPP;

int main(int argc, char* argv[])
{
    string s2, s1 =
        "bbb91c1c95b656f386b19ab284b9c0f66598e7761cd71569734bb72b6a7153b77613a6cef8e63"
        "e9bd9bb1e0e53a0fd8fa2162b160fcb7b461689afddf098bfc32300cf6808960127f1d9f0e287"
        "f948257f7e0574b56585dd1efe1192d784b9c93f9c2215bd4867062ea30f034265374fa013ab4"
        "5af06cd8554fd55f1c442c2ed";

    // Use a source to convert to big-endian array
    StringSource ss(s1, true, new HexDecoder);

    // Use big-endian array to construct n
    Integer n;
    n.Decode(ss, ss.MaxRetrievable());

    // Convert to Base10
    s2 = IntToString<Integer>(n, 10);
    cout << s2 << endl;

    return 0;
}
If you are interested in the algorithm that converts the ASCII string into the byte array used for the internal representation, then see StringToInteger in integer.cpp. It repeatedly divides by the base (2, 8, 10, 16, etc.).
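For illustration only, here is a small, library-free sketch of that repeated-division idea: treat the hex digits as one big number, divide it by 10 over and over, and collect the remainders as the decimal digits (least significant first). The function name HexToDecimal is made up for this example and is not part of Crypto++, whose internal code works on its own representation.

#include <algorithm>
#include <cctype>
#include <iostream>
#include <string>
#include <vector>

// Convert a Base16 string to a Base10 string by repeatedly dividing
// the number (kept as a vector of hex digit values) by 10 and
// collecting the remainders. Demonstration of the technique only.
std::string HexToDecimal(const std::string& hex)
{
    std::vector<int> digits; // most significant hex digit first
    for (char ch : hex)
    {
        int d = std::isdigit((unsigned char)ch)
                    ? ch - '0'
                    : std::tolower((unsigned char)ch) - 'a' + 10;
        digits.push_back(d);
    }

    std::string decimal;
    while (!digits.empty())
    {
        // One long division of the hex-digit number by 10.
        std::vector<int> quotient;
        int remainder = 0;
        for (int d : digits)
        {
            int value = remainder * 16 + d;
            if (!quotient.empty() || value / 10 != 0) // skip leading zeros
                quotient.push_back(value / 10);
            remainder = value % 10;
        }
        decimal.push_back((char)('0' + remainder)); // least significant digit first
        digits.swap(quotient);
    }

    if (decimal.empty())
        decimal = "0";
    std::reverse(decimal.begin(), decimal.end());
    return decimal;
}

int main()
{
    std::cout << HexToDecimal("ff") << std::endl; // prints 255
    return 0;
}

Applied to the s1 string above, this produces the same Base10 string that the Crypto++ examples print.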