Commit fd4aaab1
Authored Jun 15, 2017 by topjohnwu

Rewrite zip signing

Parent: 42d14d5c

Showing 6 changed files with 593 additions and 1079 deletions (+593 -1079).
ProcessRepoZip.java  (app/src/main/java/com/topjohnwu/magisk/asyncs/ProcessRepoZip.java)   +12   -31
ManagerUpdate.java   (app/src/main/java/com/topjohnwu/magisk/receivers/ManagerUpdate.java)   +0    -1
ZipUtils.java        (app/src/main/java/com/topjohnwu/magisk/utils/ZipUtils.java)          +426  -864
jni_glue.c           (app/src/main/jni/jni_glue.c)                                           +9   -50
zipadjust.c          (app/src/main/jni/zipadjust.c)                                        +145  -129
zipadjust.h          (app/src/main/jni/zipadjust.h)                                          +1    -4
app/src/main/java/com/topjohnwu/magisk/asyncs/ProcessRepoZip.java

@@ -14,7 +14,6 @@ import com.topjohnwu.magisk.utils.ZipUtils;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.OutputStream;

public class ProcessRepoZip extends ParallelTask<Void, Void, Boolean> {

@@ -38,51 +37,33 @@ public class ProcessRepoZip extends ParallelTask<Void, Void, Boolean> {
    @Override
    protected Boolean doInBackground(Void... params) {
        FileInputStream in;
        FileOutputStream out;
        try {
            // Create temp file
            File temp1 = new File(magiskManager.getCacheDir(), "1.zip");
            File temp2 = new File(magiskManager.getCacheDir(), "2.zip");
            if (magiskManager.getCacheDir().mkdirs()) {
                temp1.createNewFile();
                temp2.createNewFile();
            }
            out = new FileOutputStream(temp1);
            magiskManager.getCacheDir().mkdirs();
            temp1.createNewFile();
            temp2.createNewFile();
            // First remove top folder in Github source zip, Uri -> temp1
            ZipUtils.removeTopFolder(activity.getContentResolver().openInputStream(mUri), out);
            out.flush();
            out.close();
            out = new FileOutputStream(temp2);
            ZipUtils.removeTopFolder(activity.getContentResolver().openInputStream(mUri), temp1);
            // Then sign the zip for the first time, temp1 -> temp2
            ZipUtils.signZip(activity, temp1, out, false);
            out.flush();
            out.close();
            // Adjust the zip to prevent unzip issues, temp2 -> temp2
            ZipUtils.adjustZip(temp2);
            out = new FileOutputStream(temp1);
            ZipUtils.signZip(activity, temp1, temp2, false);
            // Finally, sign the whole zip file again, temp2 -> temp1
            ZipUtils.signZip(activity, temp2, out, true);
            out.flush();
            out.close();
            // Adjust the zip to prevent unzip issues, temp2 -> temp1
            ZipUtils.zipAdjust(temp2.getPath(), temp1.getPath());
            in = new FileInputStream(temp1);
            // Finally, sign the whole zip file again, temp1 -> temp2
            ZipUtils.signZip(activity, temp1, temp2, true);
            // Write it back to the downloaded zip, temp1 -> Uri
            // Write it back to the downloaded zip, temp2 -> Uri
            FileInputStream in = new FileInputStream(temp2);
            try (OutputStream target = activity.getContentResolver().openOutputStream(mUri)) {
                byte[] buffer = new byte[4096];
                int length;
                if (target == null) throw new FileNotFoundException();
                if (target == null) throw new FileNotFoundException();
                while ((length = in.read(buffer)) > 0)
                    target.write(buffer, 0, length);
            }
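The hunk above chains the new ZipUtils calls into a fixed pipeline: strip the GitHub top-level folder, sign the jar entries, run zipadjust, then whole-file sign into the output Uri. Below is a minimal sketch of that flow, assuming only the ZipUtils signatures that appear in this diff (removeTopFolder(InputStream, File), signZip(Context, File, File, boolean), zipAdjust(String, String)) and the imports already shown in ProcessRepoZip.java; the helper method and its name are illustrative, not part of the commit.

// Illustrative sketch of the repo-zip pipeline; error handling omitted.
static void processRepoZip(Context activity, Uri uri, File cacheDir) throws Exception {
    File temp1 = new File(cacheDir, "1.zip");
    File temp2 = new File(cacheDir, "2.zip");
    // Uri -> temp1: drop the top-level folder GitHub adds to source zips
    ZipUtils.removeTopFolder(activity.getContentResolver().openInputStream(uri), temp1);
    // temp1 -> temp2: standard jar signing (MANIFEST.MF / CERT.SF / CERT.%s)
    ZipUtils.signZip(activity, temp1, temp2, false);
    // temp2 -> temp1: zipadjust rewrites offsets so recovery unzippers accept the file
    ZipUtils.zipAdjust(temp2.getPath(), temp1.getPath());
    // temp1 -> temp2: whole-file (OTA-style) signature stored in the zip comment
    ZipUtils.signZip(activity, temp1, temp2, true);
    // temp2 -> Uri: copy the final zip back over the downloaded file
    try (InputStream in = new FileInputStream(temp2);
         OutputStream out = activity.getContentResolver().openOutputStream(uri)) {
        byte[] buf = new byte[4096];
        for (int n; (n = in.read(buf)) > 0; ) out.write(buf, 0, n);
    }
}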
app/src/main/java/com/topjohnwu/magisk/receivers/ManagerUpdate.java

@@ -7,7 +7,6 @@ import android.net.Uri;
import android.os.Build;
import android.support.v4.content.FileProvider;

import com.topjohnwu.magisk.MagiskManager;
import com.topjohnwu.magisk.utils.Utils;

import java.io.File;
app/src/main/java/com/topjohnwu/magisk/utils/ZipUtils.java

package com.topjohnwu.magisk.utils;

import android.content.Context;
import android.util.Pair;

import org.spongycastle.asn1.ASN1InputStream;
import org.spongycastle.asn1.ASN1ObjectIdentifier;
import org.spongycastle.asn1.DEROutputStream;
import org.spongycastle.asn1.cms.CMSObjectIdentifiers;
import org.spongycastle.asn1.pkcs.PrivateKeyInfo;
import org.spongycastle.cert.jcajce.JcaCertStore;
import org.spongycastle.cms.CMSException;
import org.spongycastle.cms.CMSProcessableByteArray;

@@ -21,8 +21,10 @@ import org.spongycastle.operator.jcajce.JcaContentSignerBuilder;
import org.spongycastle.operator.jcajce.JcaDigestCalculatorProviderBuilder;
import org.spongycastle.util.encoders.Base64;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;

@@ -31,24 +33,19 @@ import java.io.OutputStream;
import java.io.PrintStream;
import java.security.DigestOutputStream;
import java.security.GeneralSecurityException;
import java.security.Key;
import java.security.KeyFactory;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.Provider;
import java.security.Security;
import java.security.cert.CertificateEncodingException;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.KeySpec;
import java.security.spec.PKCS8EncodedKeySpec;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.TreeMap;

@@ -60,50 +57,35 @@ import java.util.jar.JarOutputStream;
import java.util.jar.Manifest;
import java.util.regex.Pattern;

import javax.crypto.Cipher;
import javax.crypto.EncryptedPrivateKeyInfo;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;

/*
 * Modified from from AOSP(Marshmallow) SignAPK.java
 * */
public class ZipUtils {
    private static final String CERT_SF_NAME = "META-INF/CERT.SF";
    private static final String CERT_SIG_NAME = "META-INF/CERT.%s";
    private static final String OTACERT_NAME = "META-INF/com/android/otacert";
    // File name in assets
    private static final String PUBLIC_KEY_NAME = "public.certificate.x509.pem";
    private static final String PRIVATE_KEY_NAME = "private.key.pk8";
    private static final String CERT_SF_NAME = "META-INF/CERT.SF";
    private static final String CERT_SIG_NAME = "META-INF/CERT.%s";
    private static Provider sBouncyCastleProvider;
    // bitmasks for which hash algorithms we need the manifest to include.
    private static final int USE_SHA1 = 1;
    private static final int USE_SHA256 = 2;
    // Files matching this pattern are not copied to the output.
    private static Pattern stripPattern = Pattern.compile("^(META-INF/((.*)[.](SF|RSA|DSA|EC)|com/android/otacert))|(" + Pattern.quote(JarFile.MANIFEST_NAME) + ")$");

    static {
        System.loadLibrary("zipadjust");
        sBouncyCastleProvider = new BouncyCastleProvider();
        Security.insertProviderAt(sBouncyCastleProvider, 1);
        System.loadLibrary("zipadjust");
    }

    public native static byte[] zipAdjust(byte[] bytes, int size);
    public native static void zipAdjust(String filename);

    // Wrapper function for the JNI function
    public static void adjustZip(ByteArrayInOutStream buffer) {
        buffer.setBuffer(zipAdjust(buffer.toByteArray(), buffer.size()));
    }

    public static void adjustZip(File file) {
        zipAdjust(file.getPath());
    }

    public native static void zipAdjust(String filenameIn, String filenameOut);

    public static void removeTopFolder(InputStream in, OutputStream out) throws IOException {
    public static void removeTopFolder(InputStream in, File output) throws IOException {
        try {
            JarInputStream source = new JarInputStream(in);
            JarOutputStream dest = new JarOutputStream(out);
            JarOutputStream dest = new JarOutputStream(new FileOutputStream(output));
            JarEntry entry;
            String path;
            int size;
@@ -133,14 +115,6 @@ public class ZipUtils {
        }
    }

    public static void unzip(File file, File folder) throws Exception {
        unzip(file, folder, "");
    }

    public static void unzip(InputStream file, File folder) throws Exception {
        unzip(file, folder, "");
    }

    public static void unzip(File file, File folder, String path) throws Exception {
        int count;
        FileOutputStream out;

@@ -163,7 +137,7 @@ public class ZipUtils {
                dest.createNewFile();
            }
            out = new FileOutputStream(dest);
            while ((count = is.read(data, 0, 4096)) != -1) {
            while ((count = is.read(data)) != -1) {
                out.write(data, 0, count);
            }
            out.flush();

@@ -176,95 +150,27 @@ public class ZipUtils {
        }
    }

    public static void unzip(InputStream file, File folder, String path) throws Exception {
        int count;
        FileOutputStream out;
        File dest;
        JarEntry entry;
        byte data[] = new byte[4096];
        try (JarInputStream zipfile = new JarInputStream(file)) {
            while ((entry = zipfile.getNextJarEntry()) != null) {
                if (!entry.getName().contains(path) || entry.isDirectory()) {
                    // Ignore directories, only create files
                    continue;
                }
                Logger.dev("ZipUtils: Extracting: " + entry);
                dest = new File(folder, entry.getName());
                if (dest.getParentFile().mkdirs()) {
                    dest.createNewFile();
                }
                out = new FileOutputStream(dest);
                while ((count = zipfile.read(data, 0, 4096)) != -1) {
                    out.write(data, 0, count);
                }
                out.flush();
                out.close();
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        }
    }
    public static void signZip(Context context, File input, OutputStream outputStream, boolean signWholeFile) throws Exception {
        JarFile inputJar = new JarFile(input);
    public static void signZip(Context context, File input, File output, boolean minSign) {
        int alignment = 4;
        JarFile inputJar = null;
        FileOutputStream outputFile = null;
        int hashes = 0;
        try {
            X509Certificate publicKey = GeneralUtils.readPublicKey(context.getAssets().open(PUBLIC_KEY_NAME));
            hashes |= FileUtils.getDigestAlgorithm(publicKey);
            // Set the ZIP file timestamp to the starting valid time
            // of the 0th certificate plus one hour (to match what
            // we've historically done).
            long timestamp = publicKey.getNotBefore().getTime() + 3600L * 1000;
            PrivateKey privateKey = GeneralUtils.readPrivateKey(context.getAssets().open(PRIVATE_KEY_NAME));
            if (signWholeFile) {
                if (!"RSA".equalsIgnoreCase(privateKey.getAlgorithm())) {
                    throw new IOException("Cannot sign OTA packages with non-RSA keys");
                }
                FileUtils.signWholeFile(inputJar, context.getAssets().open(PUBLIC_KEY_NAME), publicKey, privateKey, outputStream);
            } else {
                JarOutputStream outputJar = new JarOutputStream(outputStream);
                // For signing .apks, use the maximum compression to make
                // them as small as possible (since they live forever on
                // the system partition). For OTA packages, use the
                // default compression level, which is much much faster
                // and produces output that is only a tiny bit larger
                // (~0.1% on full OTA packages I tested).
                outputJar.setLevel(9);
                Manifest manifest = FileUtils.addDigestsToManifest(inputJar, hashes);
                FileUtils.copyFiles(manifest, inputJar, outputJar, timestamp);
                GeneralUtils.signFile(manifest, publicKey, privateKey, outputJar);
                outputJar.close();
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        }
    }

            X509Certificate publicKey = readPublicKey(context.getAssets().open(PUBLIC_KEY_NAME));
            hashes |= getDigestAlgorithm(publicKey);

    public static void signZip(Context context, InputStream inputStream, OutputStream outputStream, boolean signWholeFile) throws Exception {
        StreamUtils.JarMap inputJar;
        int hashes = 0;
        try {
            X509Certificate publicKey = GeneralUtils.readPublicKey(context.getAssets().open(PUBLIC_KEY_NAME));
            hashes |= FileUtils.getDigestAlgorithm(publicKey);
            // Set the ZIP file timestamp to the starting valid time
            // of the 0th certificate plus one hour (to match what
            // we've historically done).
            long timestamp = publicKey.getNotBefore().getTime() + 3600L * 1000;
            PrivateKey privateKey = GeneralUtils.readPrivateKey(context.getAssets().open(PRIVATE_KEY_NAME));
            inputJar = new StreamUtils.JarMap(new JarInputStream(inputStream));
            if (signWholeFile) {
                if (!"RSA".equalsIgnoreCase(privateKey.getAlgorithm())) {
                    throw new IOException("Cannot sign OTA packages with non-RSA keys");
                }
                StreamUtils.signWholeFile(inputJar, context.getAssets().open(PUBLIC_KEY_NAME), publicKey, privateKey, outputStream);

            PrivateKey privateKey = readPrivateKey(context.getAssets().open(PRIVATE_KEY_NAME));
            outputFile = new FileOutputStream(output);
            if (minSign) {
                ZipUtils.signWholeFile(input, publicKey, privateKey, outputFile);
            } else {
                JarOutputStream outputJar = new JarOutputStream(outputStream);
                inputJar = new JarFile(input, false);  // Don't verify.
                JarOutputStream outputJar = new JarOutputStream(outputFile);
                // For signing .apks, use the maximum compression to make
                // them as small as possible (since they live forever on
                // the system partition). For OTA packages, use the

@@ -272,794 +178,450 @@ public class ZipUtils {
                // and produces output that is only a tiny bit larger
                // (~0.1% on full OTA packages I tested).
                outputJar.setLevel(9);
                Manifest manifest = StreamUtils.addDigestsToManifest(inputJar, hashes);
                StreamUtils.copyFiles(manifest, inputJar, outputJar, timestamp);
                GeneralUtils.signFile(manifest, publicKey, privateKey, outputJar);
                Manifest manifest = addDigestsToManifest(inputJar, hashes);
                copyFiles(manifest, inputJar, outputJar, timestamp, alignment);
                signFile(manifest, inputJar, publicKey, privateKey, outputJar);
                outputJar.close();
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        } finally {
            try {
                if (inputJar != null) inputJar.close();
                if (outputFile != null) outputFile.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
    // This class host general functions
    public static class GeneralUtils {

        /** Returns the expected signature algorithm for this key type. */
        private static String getSignatureAlgorithm(X509Certificate cert) {
            String sigAlg = cert.getSigAlgName().toUpperCase(Locale.US);
            String keyType = cert.getPublicKey().getAlgorithm().toUpperCase(Locale.US);
            if ("RSA".equalsIgnoreCase(keyType)) {
                if (FileUtils.getDigestAlgorithm(cert) == USE_SHA256) {
                    return "SHA256withRSA";
                } else {
                    return "SHA1withRSA";
                }
            } else if ("DSA".equalsIgnoreCase(keyType)) {
                return "SHA256withDSA";
            } else if ("EC".equalsIgnoreCase(keyType)) {
                return "SHA256withECDSA";

    /**
     * Return one of USE_SHA1 or USE_SHA256 according to the signature
     * algorithm specified in the cert.
     */
    private static int getDigestAlgorithm(X509Certificate cert) {
        String sigAlg = cert.getSigAlgName().toUpperCase(Locale.US);
        if ("SHA1WITHRSA".equals(sigAlg) || "MD5WITHRSA".equals(sigAlg)) {
            // see "HISTORICAL NOTE" above.
            return USE_SHA1;
        } else if (sigAlg.startsWith("SHA256WITH")) {
            return USE_SHA256;
        } else {
            throw new IllegalArgumentException("unsupported signature algorithm \"" + sigAlg + "\" in cert [" + cert.getSubjectDN());
        }
    }

    /** Returns the expected signature algorithm for this key type. */
    private static String getSignatureAlgorithm(X509Certificate cert) {
        String sigAlg = cert.getSigAlgName().toUpperCase(Locale.US);
        String keyType = cert.getPublicKey().getAlgorithm().toUpperCase(Locale.US);
        if ("RSA".equalsIgnoreCase(keyType)) {
            if (getDigestAlgorithm(cert) == USE_SHA256) {
                return "SHA256withRSA";
            } else {
                throw new IllegalArgumentException("unsupported key type: " + keyType);
                return "SHA1withRSA";
            }
        } else if ("EC".equalsIgnoreCase(keyType)) {
            return "SHA256withECDSA";
        } else {
            throw new IllegalArgumentException("unsupported key type: " + keyType);
        }

    private static X509Certificate readPublicKey(InputStream input) throws IOException, GeneralSecurityException {
        try {
            CertificateFactory cf = CertificateFactory.getInstance("X.509");
            return (X509Certificate) cf.generateCertificate(input);
        } finally {
            input.close();
        }
    }

    // Files matching this pattern are not copied to the output.
    private static Pattern stripPattern = Pattern.compile("^(META-INF/((.*)[.](SF|RSA|DSA|EC)|com/android/otacert))|(" + Pattern.quote(JarFile.MANIFEST_NAME) + ")$");

    private static X509Certificate readPublicKey(InputStream input) throws IOException, GeneralSecurityException {
        try {
            CertificateFactory cf = CertificateFactory.getInstance("X.509");
            return (X509Certificate) cf.generateCertificate(input);
        } finally {
            input.close();
        }
    }

    /**
     * Decrypt an encrypted PKCS 8 format private key.
     *
     * Based on ghstark's post on Aug 6, 2006 at
     * http://forums.sun.com/thread.jspa?threadID=758133&messageID=4330949
     *
     * @param encryptedPrivateKey The raw data of the private key
     * @param keyFile The file containing the private key
     */
    private static KeySpec decryptPrivateKey(byte[] encryptedPrivateKey, File keyFile) throws GeneralSecurityException {
        EncryptedPrivateKeyInfo epkInfo;
        try {
            epkInfo = new EncryptedPrivateKeyInfo(encryptedPrivateKey);
        } catch (IOException ex) {
            // Probably not an encrypted key.
            return null;
        }
        // We no longer have console, so need to use another way to input password
        // This function is left here if needed in the future, so no use for now
        char[] password = new char[0];
        SecretKeyFactory skFactory = SecretKeyFactory.getInstance(epkInfo.getAlgName());
        Key key = skFactory.generateSecret(new PBEKeySpec(password));
        Cipher cipher = Cipher.getInstance(epkInfo.getAlgName());
        cipher.init(Cipher.DECRYPT_MODE, key, epkInfo.getAlgParameters());
        try {
            return epkInfo.getKeySpec(cipher);
        } catch (InvalidKeySpecException ex) {
            System.err.println("signapk: Password for " + keyFile + " may be bad.");
            throw ex;
        }

    /** Read a PKCS#8 format private key. */
    private static PrivateKey readPrivateKey(InputStream input) throws IOException, GeneralSecurityException {
        try {
            byte[] buffer = new byte[4096];
            int size = input.read(buffer);
            byte[] bytes = Arrays.copyOf(buffer, size);
            /* Check to see if this is in an EncryptedPrivateKeyInfo structure. */
            PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(bytes);
            /*
             * Now it's in a PKCS#8 PrivateKeyInfo structure. Read its Algorithm
             * OID and use that to construct a KeyFactory.
             */
            ASN1InputStream bIn = new ASN1InputStream(new ByteArrayInputStream(spec.getEncoded()));
            PrivateKeyInfo pki = PrivateKeyInfo.getInstance(bIn.readObject());
            String algOid = pki.getPrivateKeyAlgorithm().getAlgorithm().getId();
            return KeyFactory.getInstance(algOid).generatePrivate(spec);
        } finally {
            input.close();
        }

    /** Read a PKCS 8 format private key. */
    private static PrivateKey readPrivateKey(InputStream input) throws IOException, GeneralSecurityException {
        try {
            byte[] buffer = new byte[4096];
            int size = input.read(buffer);
            byte[] bytes = Arrays.copyOf(buffer, size);
            KeySpec spec = new PKCS8EncodedKeySpec(bytes);
            PrivateKey key;
            key = decodeAsKeyType(spec, "RSA");
            if (key != null) {
                return key;
            }
    /**
     * Add the hash(es) of every file to the manifest, creating it if
     * necessary.
     */
    private static Manifest addDigestsToManifest(JarFile jar, int hashes) throws IOException, GeneralSecurityException {
        Manifest input = jar.getManifest();
        Manifest output = new Manifest();
        Attributes main = output.getMainAttributes();
        if (input != null) {
            main.putAll(input.getMainAttributes());
        } else {
            main.putValue("Manifest-Version", "1.0");
            main.putValue("Created-By", "1.0 (Android SignApk)");
        }
        MessageDigest md_sha1 = null;
        MessageDigest md_sha256 = null;
        if ((hashes & USE_SHA1) != 0) {
            md_sha1 = MessageDigest.getInstance("SHA1");
        }
        if ((hashes & USE_SHA256) != 0) {
            md_sha256 = MessageDigest.getInstance("SHA256");
        }
        byte[] buffer = new byte[4096];
        int num;
        // We sort the input entries by name, and add them to the
        // output manifest in sorted order. We expect that the output
        // map will be deterministic.
        TreeMap<String, JarEntry> byName = new TreeMap<String, JarEntry>();
        for (Enumeration<JarEntry> e = jar.entries(); e.hasMoreElements(); ) {
            JarEntry entry = e.nextElement();
            byName.put(entry.getName(), entry);
        }
        for (JarEntry entry: byName.values()) {
            String name = entry.getName();
            if (!entry.isDirectory() && (stripPattern == null || !stripPattern.matcher(name).matches())) {
                InputStream data = jar.getInputStream(entry);
                while ((num = data.read(buffer)) > 0) {
                    if (md_sha1 != null) md_sha1.update(buffer, 0, num);
                    if (md_sha256 != null) md_sha256.update(buffer, 0, num);
                }

            key = decodeAsKeyType(spec, "DSA");
            if (key != null) {
                return key;

                Attributes attr = null;
                if (input != null) attr = input.getAttributes(name);
                attr = attr != null ? new Attributes(attr) : new Attributes();
                if (md_sha1 != null) {
                    attr.putValue("SHA1-Digest", new String(Base64.encode(md_sha1.digest()), "ASCII"));
                }

            key = decodeAsKeyType(spec, "EC");
            if (key != null) {
                return key;

                if (md_sha256 != null) {
                    attr.putValue("SHA-256-Digest", new String(Base64.encode(md_sha256.digest()), "ASCII"));
                }

            throw new NoSuchAlgorithmException("Must be an RSA, DSA, or EC key");
        } finally {
            input.close();
                output.getEntries().put(name, attr);
            }
        }
        return output;
    }

    private static PrivateKey decodeAsKeyType(KeySpec spec, String keyType) throws GeneralSecurityException {
        try {
            return KeyFactory.getInstance(keyType).generatePrivate(spec);
        } catch (InvalidKeySpecException e) {
            return null;
        }
    /** Write to another stream and track how many bytes have been
     * written.
     */
    private static class CountOutputStream extends FilterOutputStream {
        private int mCount;

        public CountOutputStream(OutputStream out) {
            super(out);
            mCount = 0;
        }

    /**
     * Add a copy of the public key to the archive; this should
     * exactly match one of the files in
     * /system/etc/security/otacerts.zip on the device. (The same
     * cert can be extracted from the CERT.RSA file but this is much
     * easier to get at.)
     */
    private static void addOtacert(JarOutputStream outputJar, InputStream input, long timestamp, Manifest manifest, int hash) throws IOException, GeneralSecurityException {
        MessageDigest md = MessageDigest.getInstance(hash == USE_SHA1 ? "SHA1" : "SHA256");
        JarEntry je = new JarEntry(OTACERT_NAME);
        je.setTime(timestamp);
        outputJar.putNextEntry(je);
        byte[] b = new byte[4096];
        int read;
        while ((read = input.read(b)) != -1) {
            outputJar.write(b, 0, read);
            md.update(b, 0, read);
        }
        input.close();
        Attributes attr = new Attributes();
        attr.putValue(hash == USE_SHA1 ? "SHA1-Digest" : "SHA-256-Digest", new String(Base64.encode(md.digest()), "ASCII"));
        manifest.getEntries().put(OTACERT_NAME, attr);

        @Override
        public void write(int b) throws IOException {
            super.write(b);
            mCount++;
        }

    /** Write a .SF file with a digest of the specified manifest. */
    private static void writeSignatureFile(Manifest manifest, OutputStream out, int hash) throws IOException, GeneralSecurityException {
        Manifest sf = new Manifest();
        Attributes main = sf.getMainAttributes();
        main.putValue("Signature-Version", "1.0");
        main.putValue("Created-By", "1.0 (Android SignApk)");
        MessageDigest md = MessageDigest.getInstance(hash == USE_SHA256 ? "SHA256" : "SHA1");
        PrintStream print = new PrintStream(new DigestOutputStream(new ByteArrayOutputStream(), md), true, "UTF-8");
        // Digest of the entire manifest
        manifest.write(print);

        @Override
        public void write(byte[] b, int off, int len) throws IOException {
            super.write(b, off, len);
            mCount += len;
        }

        public int size() {
            return mCount;
        }
    }
    /** Write a .SF file with a digest of the specified manifest. */
    private static void writeSignatureFile(Manifest manifest, OutputStream out, int hash) throws IOException, GeneralSecurityException {
        Manifest sf = new Manifest();
        Attributes main = sf.getMainAttributes();
        main.putValue("Signature-Version", "1.0");
        main.putValue("Created-By", "1.0 (Android SignApk)");
        MessageDigest md = MessageDigest.getInstance(hash == USE_SHA256 ? "SHA256" : "SHA1");
        PrintStream print = new PrintStream(new DigestOutputStream(new ByteArrayOutputStream(), md), true, "UTF-8");
        // Digest of the entire manifest
        manifest.write(print);
        print.flush();
        main.putValue(hash == USE_SHA256 ? "SHA-256-Digest-Manifest" : "SHA1-Digest-Manifest", new String(Base64.encode(md.digest()), "ASCII"));
        Map<String, Attributes> entries = manifest.getEntries();
        for (Map.Entry<String, Attributes> entry : entries.entrySet()) {
            // Digest of the manifest stanza for this entry.
            print.print("Name: " + entry.getKey() + "\r\n");
            for (Map.Entry<Object, Object> att : entry.getValue().entrySet()) {
                print.print(att.getKey() + ": " + att.getValue() + "\r\n");
            }
            print.print("\r\n");
            print.flush();
            main.putValue(hash == USE_SHA256 ? "SHA-256-Digest-Manifest" : "SHA1-Digest-Manifest",
            Attributes sfAttr = new Attributes();
            sfAttr.putValue(hash == USE_SHA256 ? "SHA-256-Digest" : "SHA1-Digest-Manifest", new String(Base64.encode(md.digest()), "ASCII"));
        Map<String, Attributes> entries = manifest.getEntries();
        for (Map.Entry<String, Attributes> entry : entries.entrySet()) {
            // Digest of the manifest stanza for this entry.
            print.print("Name: " + entry.getKey() + "\r\n");
            for (Map.Entry<Object, Object> att : entry.getValue().entrySet()) {
                print.print(att.getKey() + ": " + att.getValue() + "\r\n");
            }
            print.print("\r\n");
            print.flush();
            Attributes sfAttr = new Attributes();
            sfAttr.putValue(hash == USE_SHA256 ? "SHA-256-Digest" : "SHA1-Digest-Manifest", new String(Base64.encode(md.digest()), "ASCII"));
            sf.getEntries().put(entry.getKey(), sfAttr);
        }
        CountOutputStream cout = new CountOutputStream(out);
        sf.write(cout);
        // A bug in the java.util.jar implementation of Android platforms
        // up to version 1.6 will cause a spurious IOException to be thrown
        // if the length of the signature file is a multiple of 1024 bytes.
        // As a workaround, add an extra CRLF in this case.
        if ((cout.size() % 1024) == 0) {
            cout.write('\r');
            cout.write('\n');
        }
            sf.getEntries().put(entry.getKey(), sfAttr);
        }
    /** Sign data and write the digital signature to 'out'. */
    private static void writeSignatureBlock(CMSTypedData data, X509Certificate publicKey, PrivateKey privateKey, OutputStream out) throws IOException, CertificateEncodingException, OperatorCreationException, CMSException {
        ArrayList<X509Certificate> certList = new ArrayList<X509Certificate>(1);
        certList.add(publicKey);
        JcaCertStore certs = new JcaCertStore(certList);
        CMSSignedDataGenerator gen = new CMSSignedDataGenerator();
        ContentSigner signer = new JcaContentSignerBuilder(getSignatureAlgorithm(publicKey))
                .setProvider(sBouncyCastleProvider)
                .build(privateKey);
        gen.addSignerInfoGenerator(
                new JcaSignerInfoGeneratorBuilder(
                        new JcaDigestCalculatorProviderBuilder()
                                .setProvider(sBouncyCastleProvider)
                                .build())
                        .setDirectSignature(true)
                        .build(signer, publicKey));
        gen.addCertificates(certs);
        CMSSignedData sigData = gen.generate(data, false);
        try (ASN1InputStream asn1 = new ASN1InputStream(sigData.getEncoded())) {
            DEROutputStream dos = new DEROutputStream(out);
            dos.writeObject(asn1.readObject());
        }

        CountOutputStream cout = new CountOutputStream(out);
        sf.write(cout);
        // A bug in the java.util.jar implementation of Android platforms
        // up to version 1.6 will cause a spurious IOException to be thrown
        // if the length of the signature file is a multiple of 1024 bytes.
        // As a workaround, add an extra CRLF in this case.
        if ((cout.size() % 1024) == 0) {
            cout.write('\r');
            cout.write('\n');
        }

    private static void signFile(Manifest manifest, X509Certificate publicKey, PrivateKey privateKey, JarOutputStream outputJar) throws Exception {
        // Assume the certificate is valid for at least an hour.
        long timestamp = publicKey.getNotBefore().getTime() + 3600L * 1000;
        // MANIFEST.MF
        JarEntry je = new JarEntry(JarFile.MANIFEST_NAME);
        je.setTime(timestamp);
        outputJar.putNextEntry(je);
        manifest.write(outputJar);
        // CERT.SF / CERT#.SF
        je = new JarEntry(CERT_SF_NAME);
        je.setTime(timestamp);
        outputJar.putNextEntry(je);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        writeSignatureFile(manifest, baos, FileUtils.getDigestAlgorithm(publicKey));
        byte[] signedData = baos.toByteArray();
        outputJar.write(signedData);
        // CERT.{DSA,EC,RSA} / CERT#.{DSA,EC,RSA}
        je = new JarEntry((String.format(CERT_SIG_NAME, privateKey.getAlgorithm())));
        je.setTime(timestamp);
        outputJar.putNextEntry(je);
        writeSignatureBlock(new CMSProcessableByteArray(signedData), publicKey, privateKey, outputJar);
    }

    /** Sign data and write the digital signature to 'out'. */
    private static void writeSignatureBlock(CMSTypedData data, X509Certificate publicKey, PrivateKey privateKey, OutputStream out) throws IOException, CertificateEncodingException, OperatorCreationException, CMSException {
        ArrayList<X509Certificate> certList = new ArrayList<>(1);
        certList.add(publicKey);
        JcaCertStore certs = new JcaCertStore(certList);
        CMSSignedDataGenerator gen = new CMSSignedDataGenerator();
        ContentSigner signer = new JcaContentSignerBuilder(getSignatureAlgorithm(publicKey))
                .setProvider(sBouncyCastleProvider)
                .build(privateKey);
        gen.addSignerInfoGenerator(
                new JcaSignerInfoGeneratorBuilder(
                        new JcaDigestCalculatorProviderBuilder()
                                .setProvider(sBouncyCastleProvider)
                                .build())
                        .setDirectSignature(true)
                        .build(signer, publicKey));
        gen.addCertificates(certs);
        CMSSignedData sigData = gen.generate(data, false);
        ASN1InputStream asn1 = new ASN1InputStream(sigData.getEncoded());
        DEROutputStream dos = new DEROutputStream(out);
        dos.writeObject(asn1.readObject());
    }
    /**
     * Copy all the files in a manifest from input to output. We set
     * the modification times in the output to a fixed time, so as to
     * reduce variation in the output file and make incremental OTAs
     * more efficient.
     */
    private static void copyFiles(Manifest manifest, JarFile in, JarOutputStream out, long timestamp, int alignment) throws IOException {
        byte[] buffer = new byte[4096];
        int num;
        Map<String, Attributes> entries = manifest.getEntries();
        ArrayList<String> names = new ArrayList<String>(entries.keySet());
        Collections.sort(names);
        boolean firstEntry = true;
        long offset = 0L;
        // We do the copy in two passes -- first copying all the
        // entries that are STORED, then copying all the entries that
        // have any other compression flag (which in practice means
        // DEFLATED). This groups all the stored entries together at
        // the start of the file and makes it easier to do alignment
        // on them (since only stored entries are aligned).
        for (String name : names) {
            JarEntry inEntry = in.getJarEntry(name);
            JarEntry outEntry = null;
            if (inEntry.getMethod() != JarEntry.STORED) continue;
            // Preserve the STORED method of the input entry.
            outEntry = new JarEntry(inEntry);
            outEntry.setTime(timestamp);
            // 'offset' is the offset into the file at which we expect
            // the file data to begin. This is the value we need to
            // make a multiple of 'alignement'.
            offset += JarFile.LOCHDR + outEntry.getName().length();
            if (firstEntry) {
                // The first entry in a jar file has an extra field of
                // four bytes that you can't get rid of; any extra
                // data you specify in the JarEntry is appended to
                // these forced four bytes. This is JAR_MAGIC in
                // JarOutputStream; the bytes are 0xfeca0000.
                offset += 4;
                firstEntry = false;
            }
            if (alignment > 0 && (offset % alignment != 0)) {
                // Set the "extra data" of the entry to between 1 and
                // alignment-1 bytes, to make the file data begin at
                // an aligned offset.
                int needed = alignment - (int)(offset % alignment);
                outEntry.setExtra(new byte[needed]);
                offset += needed;
            }
            out.putNextEntry(outEntry);
            InputStream data = in.getInputStream(inEntry);
            while ((num = data.read(buffer)) > 0) {
                out.write(buffer, 0, num);
                offset += num;
            }
            out.flush();
        }
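    // -----------------------------------------------------------------------
    // Illustrative note (not part of this commit): the alignment logic above
    // pads the "extra" field of STORED entries so their data starts on a
    // multiple of 'alignment' bytes (4 here). A minimal sketch of the same
    // computation, assuming JarFile.LOCHDR (30, the fixed local-file-header
    // size, as used above) and the 4-byte JAR_MAGIC field that
    // JarOutputStream forces onto the first entry:
    //
    //   long dataStart = offset + JarFile.LOCHDR + name.length()
    //                    + (firstEntry ? 4 : 0);
    //   int pad = (int) ((alignment - dataStart % alignment) % alignment);
    //   entry.setExtra(new byte[pad]);   // 0..alignment-1 bytes of padding
    //
    // e.g. with offset = 0 and a first entry named "res/raw/a" (9 chars):
    //   dataStart = 0 + 30 + 9 + 4 = 43, pad = 4 - (43 % 4) = 1 byte of extra.
    // -----------------------------------------------------------------------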
    /** Write to another stream and track how many bytes have been
     * written.
     */
    private static class CountOutputStream extends FilterOutputStream {
        private int mCount;

        public CountOutputStream(OutputStream out) {
            super(out);
            mCount = 0;
        }

        @Override
        public void write(int b) throws IOException {
            super.write(b);
            mCount++;
        }

        @Override
        public void write(byte[] b, int off, int len) throws IOException {
            super.write(b, off, len);
            mCount += len;
        }

        public int size() {
            return mCount;
        }
    }

    private static class WholeFileSignerOutputStream extends FilterOutputStream {
        private boolean closing = false;
        private ByteArrayOutputStream footer = new ByteArrayOutputStream();
        private OutputStream tee;

        public WholeFileSignerOutputStream(OutputStream out, OutputStream tee) {
            super(out);
            this.tee = tee;
        }

        public void notifyClosing() {
            closing = true;
        }

        public void finish() throws IOException {
            closing = false;
            byte[] data = footer.toByteArray();
            if (data.length < 2)
                throw new IOException("Less than two bytes written to footer");
            write(data, 0, data.length - 2);
        }

        public byte[] getTail() {
            return footer.toByteArray();
        }

        @Override
        public void write(byte[] b) throws IOException {
            write(b, 0, b.length);
        }

        @Override
        public void write(byte[] b, int off, int len) throws IOException {
            if (closing) {
                // if the jar is about to close, save the footer that will be written
                footer.write(b, off, len);
            } else {
                // write to both output streams. out is the CMSTypedData signer and tee is the file.
                out.write(b, off, len);
                tee.write(b, off, len);
            }
        }

        @Override
        public void write(int b) throws IOException {
            if (closing) {
                // if the jar is about to close, save the footer that will be written
                footer.write(b);
            } else {
                // write to both output streams. out is the CMSTypedData signer and tee is the file.
                out.write(b);
                tee.write(b);
            }
        }

        // Copy all the non-STORED entries. We don't attempt to
        // maintain the 'offset' variable past this point; we don't do
        // alignment on these entries.
        for (String name : names) {
            JarEntry inEntry = in.getJarEntry(name);
            JarEntry outEntry = null;
            if (inEntry.getMethod() == JarEntry.STORED) continue;
            // Create a new entry so that the compressed len is recomputed.
            outEntry = new JarEntry(name);
            outEntry.setTime(timestamp);
            out.putNextEntry(outEntry);
            InputStream data = in.getInputStream(inEntry);
            while ((num = data.read(buffer)) > 0) {
                out.write(buffer, 0, num);
            }
            out.flush();
        }
    }
    // This class host functions that consumes JarFiles
    public static class FileUtils {

        // This class is to provide a file's content, but trimming out the last two bytes
        // Used for signWholeFile
        private static class CMSProcessableFile implements CMSTypedData {

        /**
         * Return one of USE_SHA1 or USE_SHA256 according to the signature
         * algorithm specified in the cert.
         */
        private static int getDigestAlgorithm(X509Certificate cert) {
            String sigAlg = cert.getSigAlgName().toUpperCase(Locale.US);
            if ("SHA1WITHRSA".equals(sigAlg) || "MD5WITHRSA".equals(sigAlg)) {
                // see "HISTORICAL NOTE" above.
                return USE_SHA1;
            } else if (sigAlg.startsWith("SHA256WITH")) {
                return USE_SHA256;
            } else {
                throw new IllegalArgumentException("unsupported signature algorithm \"" + sigAlg + "\" in cert [" + cert.getSubjectDN());
            }

            private File file;
            private ASN1ObjectIdentifier type;
            private byte[] buffer;
            int bufferSize = 0;

            CMSProcessableFile(File file) {
                this.file = file;
                type = new ASN1ObjectIdentifier(CMSObjectIdentifiers.data.getId());
                buffer = new byte[4096];
            }

        private static Manifest addDigestsToManifest(JarFile jar, int hashes) throws IOException, GeneralSecurityException {
            Manifest input = jar.getManifest();
            Manifest output = new Manifest();
            Attributes main = output.getMainAttributes();
            if (input != null) {
                main.putAll(input.getMainAttributes());
            } else {
                main.putValue("Manifest-Version", "1.0");
                main.putValue("Created-By", "1.0 (Android SignApk)");
            }
            MessageDigest md_sha1 = null;
            MessageDigest md_sha256 = null;
            if ((hashes & USE_SHA1) != 0) {
                md_sha1 = MessageDigest.getInstance("SHA1");
            }
            if ((hashes & USE_SHA256) != 0) {
                md_sha256 = MessageDigest.getInstance("SHA256");
            }
            byte[] buffer = new byte[4096];
            int num;
            // We sort the input entries by name, and add them to the
            // output manifest in sorted order. We expect that the output
            // map will be deterministic.
            TreeMap<String, JarEntry> byName = new TreeMap<String, JarEntry>();
            for (Enumeration<JarEntry> e = jar.entries(); e.hasMoreElements(); ) {
                JarEntry entry = e.nextElement();
                byName.put(entry.getName(), entry);
            }
            for (JarEntry entry: byName.values()) {
                String name = entry.getName();
                if (!entry.isDirectory() && (stripPattern == null || !stripPattern.matcher(name).matches())) {
                    InputStream data = jar.getInputStream(entry);
                    while ((num = data.read(buffer)) > 0) {
                        if (md_sha1 != null) md_sha1.update(buffer, 0, num);
                        if (md_sha256 != null) md_sha256.update(buffer, 0, num);
                    }
                    Attributes attr = null;
                    if (input != null) attr = input.getAttributes(name);
                    attr = attr != null ? new Attributes(attr) : new Attributes();
                    // Remove any previously computed digests from this entry's attributes.
                    for (Iterator<Object> i = attr.keySet().iterator(); i.hasNext();) {
                        Object key = i.next();
                        if (!(key instanceof Attributes.Name)) {
                            continue;
                        }
                        String attributeNameLowerCase = ((Attributes.Name) key).toString().toLowerCase(Locale.US);
                        if (attributeNameLowerCase.endsWith("-digest")) {
                            i.remove();
                        }
                    }
                    // Add SHA-1 digest if requested
                    if (md_sha1 != null) {
                        attr.putValue("SHA1-Digest", new String(Base64.encode(md_sha1.digest()), "ASCII"));
                    }
                    // Add SHA-256 digest if requested
                    if (md_sha256 != null) {
                        attr.putValue("SHA-256-Digest", new String(Base64.encode(md_sha256.digest()), "ASCII"));
                    }
                    output.getEntries().put(name, attr);
                }
            }
            return output;
            @Override
            public ASN1ObjectIdentifier getContentType() {
                return type;
            }

        private static void copyFiles(Manifest manifest, JarFile in, JarOutputStream out, long timestamp) throws IOException {
            byte[] buffer = new byte[4096];
            int num;
            Map<String, Attributes> entries = manifest.getEntries();
            ArrayList<String> names = new ArrayList<String>(entries.keySet());
            Collections.sort(names);
            for (String name : names) {
                JarEntry inEntry = in.getJarEntry(name);
                JarEntry outEntry = null;
                if (inEntry.getMethod() == JarEntry.STORED) {
                    // Preserve the STORED method of the input entry.
                    outEntry = new JarEntry(inEntry);

            @Override
            public void write(OutputStream out) throws IOException, CMSException {
                FileInputStream input = new FileInputStream(file);
                long len = file.length() - 2;
                while ((bufferSize = input.read(buffer)) > 0) {
                    if (len <= bufferSize) {
                        out.write(buffer, 0, (int) len);
                        break;
                    }
                } else {
                    // Create a new entry so that the compressed len is recomputed.
                    outEntry = new JarEntry(name);
                }
                outEntry.setTime(timestamp);
                out.putNextEntry(outEntry);
                InputStream data = in.getInputStream(inEntry);
                while ((num = data.read(buffer)) > 0) {
                    out.write(buffer, 0, num);
                    out.write(buffer, 0, bufferSize);
                }
                out.flush();
                len -= bufferSize;
            }
        }

        private static void signWholeFile(JarFile inputJar, InputStream publicKeyFile, X509Certificate publicKey, PrivateKey privateKey, OutputStream outputStream) throws Exception {
            CMSSigner cmsOut = new CMSSigner(inputJar, publicKeyFile, publicKey, privateKey, outputStream);
            ByteArrayOutputStream temp = new ByteArrayOutputStream();
            // put a readable message and a null char at the start of the
            // archive comment, so that tools that display the comment
            // (hopefully) show something sensible.
            // TODO: anything more useful we can put in this message?
            byte[] message = "signed by SignApk".getBytes("UTF-8");
            temp.write(message);
            temp.write(0);
            cmsOut.writeSignatureBlock(temp);
            byte[] zipData = cmsOut.getSigner().getTail();
            // For a zip with no archive comment, the
            // end-of-central-directory record will be 22 bytes long, so
            // we expect to find the EOCD marker 22 bytes from the end.
            if (zipData[zipData.length - 22] != 0x50 ||
                    zipData[zipData.length - 21] != 0x4b ||
                    zipData[zipData.length - 20] != 0x05 ||
                    zipData[zipData.length - 19] != 0x06) {
                throw new IllegalArgumentException("zip data already has an archive comment");
            }
            int total_size = temp.size() + 6;
            if (total_size > 0xffff) {
                throw new IllegalArgumentException("signature is too big for ZIP file comment");
            }
            // signature starts this many bytes from the end of the file
            int signature_start = total_size - message.length - 1;
            temp.write(signature_start & 0xff);
            temp.write((signature_start >> 8) & 0xff);
            // Why the 0xff bytes? In a zip file with no archive comment,
            // bytes [-6:-2] of the file are the little-endian offset from
            // the start of the file to the central directory. So for the
            // two high bytes to be 0xff 0xff, the archive would have to
            // be nearly 4GB in size. So it's unlikely that a real
            // commentless archive would have 0xffs here, and lets us tell
            // an old signed archive from a new one.
            temp.write(0xff);
            temp.write(0xff);
            temp.write(total_size & 0xff);
            temp.write((total_size >> 8) & 0xff);
            temp.flush();
            // Signature verification checks that the EOCD header is the
            // last such sequence in the file (to avoid minzip finding a
            // fake EOCD appended after the signature in its scan). The
            // odds of producing this sequence by chance are very low, but
            // let's catch it here if it does.
            byte[] b = temp.toByteArray();
            for (int i = 0; i < b.length - 3; ++i) {
                if (b[i] == 0x50 && b[i + 1] == 0x4b && b[i + 2] == 0x05 && b[i + 3] == 0x06) {
                    throw new IllegalArgumentException("found spurious EOCD header at " + i);
                }
            }
            outputStream.write(total_size & 0xff);
            outputStream.write((total_size >> 8) & 0xff);
            temp.writeTo(outputStream);

            @Override
            public Object getContent() {
                return file;
            }
        private static class CMSSigner implements CMSTypedData {
            private JarFile inputJar;
            private InputStream publicKeyFile;
            private X509Certificate publicKey;
            private PrivateKey privateKey;
            private OutputStream outputStream;
            private final ASN1ObjectIdentifier type;
            private GeneralUtils.WholeFileSignerOutputStream signer;

            public CMSSigner(JarFile inputJar, InputStream publicKeyFile, X509Certificate publicKey, PrivateKey privateKey, OutputStream outputStream) {
                this.inputJar = inputJar;
                this.publicKeyFile = publicKeyFile;
                this.publicKey = publicKey;
                this.privateKey = privateKey;
                this.outputStream = outputStream;
                this.type = new ASN1ObjectIdentifier(CMSObjectIdentifiers.data.getId());
            }

            public Object getContent() {
                // Not supported, but still don't crash or return null
                return 1;
            }

            public ASN1ObjectIdentifier getContentType() {
                return type;
            }

            public void write(OutputStream out) throws IOException {
                try {
                    signer = new GeneralUtils.WholeFileSignerOutputStream(out, outputStream);
                    JarOutputStream outputJar = new JarOutputStream(signer);
                    int hash = getDigestAlgorithm(publicKey);
                    // Assume the certificate is valid for at least an hour.
                    long timestamp = publicKey.getNotBefore().getTime() + 3600L * 1000;
                    Manifest manifest = addDigestsToManifest(inputJar, hash);
                    copyFiles(manifest, inputJar, outputJar, timestamp);
                    // Don't add Otacert, it's not an OTA
                    // addOtacert(outputJar, publicKeyFile, timestamp, manifest, hash);
                    GeneralUtils.signFile(manifest, publicKey, privateKey, outputJar);
                    signer.notifyClosing();
                    outputJar.close();
                    signer.finish();
                } catch (Exception e) {
                    throw new IOException(e);
                }
            }

            public void writeSignatureBlock(ByteArrayOutputStream temp) throws IOException, CertificateEncodingException, OperatorCreationException, CMSException {
                GeneralUtils.writeSignatureBlock(this, publicKey, privateKey, temp);
            }

            public GeneralUtils.WholeFileSignerOutputStream getSigner() {
                return signer;
            }

            byte[] getTail() {
                return Arrays.copyOfRange(buffer, 0, bufferSize);
            }
        }

    // This class host functions that consumes inputstreams
    // Uses JarMap (virtual random access JarFile in memory)
    public static class StreamUtils {

        /**
         * Add the hash(es) of every file to the manifest, creating it if
         * necessary.
         */
        private static Manifest addDigestsToManifest(JarMap jar, int hashes) throws IOException, GeneralSecurityException {
            Manifest input = jar.getManifest();
            Manifest output = new Manifest();
            Attributes main = output.getMainAttributes();
            if (input != null) {
                main.putAll(input.getMainAttributes());
            } else {
                main.putValue("Manifest-Version", "1.0");
                main.putValue("Created-By", "1.0 (Android SignApk)");
            }
            MessageDigest md_sha1 = null;
            MessageDigest md_sha256 = null;
            if ((hashes & USE_SHA1) != 0) {
                md_sha1 = MessageDigest.getInstance("SHA1");
            }
            if ((hashes & USE_SHA256) != 0) {
                md_sha256 = MessageDigest.getInstance("SHA256");
            }
            // We sort the input entries by name, and add them to the
            // output manifest in sorted order. We expect that the output
            // map will be deterministic.
            /* JarMap is a TreeMap, so it's already sorted */
            for (String name : jar.keySet()) {
                JarEntry entry = jar.getJarEntry(name);
                if (!entry.isDirectory() && (stripPattern == null || !stripPattern.matcher(name).matches())) {
                    byte[] buffer = jar.getStream(name).toByteArray();
                    if (md_sha1 != null) md_sha1.update(buffer, 0, buffer.length);
                    if (md_sha256 != null) md_sha256.update(buffer, 0, buffer.length);
                    Attributes attr = null;
                    if (input != null) attr = input.getAttributes(name);
                    attr = attr != null ? new Attributes(attr) : new Attributes();
                    if (md_sha1 != null) {
                        attr.putValue("SHA1-Digest", new String(Base64.encode(md_sha1.digest()), "ASCII"));
                    }
                    if (md_sha256 != null) {
                        attr.putValue("SHA-256-Digest", new String(Base64.encode(md_sha256.digest()), "ASCII"));
                    }
                    output.getEntries().put(name, attr);
                }
            }
            return output;
        }
        /**
         * Copy all the files in a manifest from input to output. We set
         * the modification times in the output to a fixed time, so as to
         * reduce variation in the output file and make incremental OTAs
         * more efficient.
         */
        private static void copyFiles(Manifest manifest, JarMap in, JarOutputStream out, long timestamp) throws IOException {
            Map<String, Attributes> entries = manifest.getEntries();
            ArrayList<String> names = new ArrayList<>(entries.keySet());
            Collections.sort(names);
            for (String name : names) {
                JarEntry inEntry = in.getJarEntry(name);
                JarEntry outEntry;
                if (inEntry.getMethod() == JarEntry.STORED) {
                    // Preserve the STORED method of the input entry.
                    outEntry = new JarEntry(inEntry);
                } else {
                    // Create a new entry so that the compressed len is recomputed.
                    outEntry = new JarEntry(name);
                }
                outEntry.setTime(timestamp);
                out.putNextEntry(outEntry);
                in.getStream(name).writeTo(out);
                out.flush();
            }
        }

        private static void signWholeFile(JarMap inputJar, InputStream publicKeyFile, X509Certificate publicKey, PrivateKey privateKey, OutputStream outputStream) throws Exception {
            CMSSigner cmsOut = new CMSSigner(inputJar, publicKeyFile, publicKey, privateKey, outputStream);
            ByteArrayOutputStream temp = new ByteArrayOutputStream();
            // put a readable message and a null char at the start of the
            // archive comment, so that tools that display the comment
            // (hopefully) show something sensible.
            // TODO: anything more useful we can put in this message?
            byte[] message = "signed by SignApk".getBytes("UTF-8");
            temp.write(message);
            temp.write(0);
            cmsOut.writeSignatureBlock(temp);
            byte[] zipData = cmsOut.getSigner().getTail();
            // For a zip with no archive comment, the
            // end-of-central-directory record will be 22 bytes long, so
            // we expect to find the EOCD marker 22 bytes from the end.
            if (zipData[zipData.length - 22] != 0x50 ||
                    zipData[zipData.length - 21] != 0x4b ||
                    zipData[zipData.length - 20] != 0x05 ||
                    zipData[zipData.length - 19] != 0x06) {
                throw new IllegalArgumentException("zip data already has an archive comment");
            }
            int total_size = temp.size() + 6;
            if (total_size > 0xffff) {
                throw new IllegalArgumentException("signature is too big for ZIP file comment");
            }
            // signature starts this many bytes from the end of the file
            int signature_start = total_size - message.length - 1;
            temp.write(signature_start & 0xff);
            temp.write((signature_start >> 8) & 0xff);
            // Why the 0xff bytes? In a zip file with no archive comment,
            // bytes [-6:-2] of the file are the little-endian offset from
            // the start of the file to the central directory. So for the
            // two high bytes to be 0xff 0xff, the archive would have to
            // be nearly 4GB in size. So it's unlikely that a real
            // commentless archive would have 0xffs here, and lets us tell
            // an old signed archive from a new one.
            temp.write(0xff);
            temp.write(0xff);
            temp.write(total_size & 0xff);
            temp.write((total_size >> 8) & 0xff);
            temp.flush();
            // Signature verification checks that the EOCD header is the
            // last such sequence in the file (to avoid minzip finding a
            // fake EOCD appended after the signature in its scan). The
            // odds of producing this sequence by chance are very low, but
            // let's catch it here if it does.
            byte[] b = temp.toByteArray();
            for (int i = 0; i < b.length - 3; ++i) {
                if (b[i] == 0x50 && b[i + 1] == 0x4b && b[i + 2] == 0x05 && b[i + 3] == 0x06) {
                    throw new IllegalArgumentException("found spurious EOCD header at " + i);
                }
            }
            outputStream.write(total_size & 0xff);
            outputStream.write((total_size >> 8) & 0xff);
            temp.writeTo(outputStream);

        private static void signWholeFile(File input, X509Certificate publicKey, PrivateKey privateKey, OutputStream outputStream) throws Exception {
            ByteArrayOutputStream temp = new ByteArrayOutputStream();
            // put a readable message and a null char at the start of the
            // archive comment, so that tools that display the comment
            // (hopefully) show something sensible.
            // TODO: anything more useful we can put in this message?
            byte[] message = "signed by SignApk".getBytes("UTF-8");
            temp.write(message);
            temp.write(0);
            CMSProcessableFile cmsFile = new CMSProcessableFile(input);
            writeSignatureBlock(cmsFile, publicKey, privateKey, temp);
            // For a zip with no archive comment, the
            // end-of-central-directory record will be 22 bytes long, so
            // we expect to find the EOCD marker 22 bytes from the end.
            byte[] zipData = cmsFile.getTail();
            if (zipData[zipData.length - 22] != 0x50 ||
                    zipData[zipData.length - 21] != 0x4b ||
                    zipData[zipData.length - 20] != 0x05 ||
                    zipData[zipData.length - 19] != 0x06) {
                throw new IllegalArgumentException("zip data already has an archive comment");
            }
public
static
class
JarMap
extends
TreeMap
<
String
,
Pair
<
JarEntry
,
ByteArrayOutputStream
>
>
{
private
Manifest
manifest
;
public
JarMap
(
JarInputStream
in
)
throws
IOException
{
super
();
manifest
=
in
.
getManifest
();
byte
[]
buffer
=
new
byte
[
4096
];
int
num
;
JarEntry
entry
;
while
((
entry
=
in
.
getNextJarEntry
())
!=
null
)
{
ByteArrayOutputStream
stream
=
new
ByteArrayOutputStream
();
while
((
num
=
in
.
read
(
buffer
))
>
0
)
{
stream
.
write
(
buffer
,
0
,
num
);
}
put
(
entry
.
getName
(),
entry
,
stream
);
}
in
.
close
();
}
public
JarEntry
getJarEntry
(
String
name
)
{
return
get
(
name
).
first
;
}
public
ByteArrayOutputStream
getStream
(
String
name
)
{
return
get
(
name
).
second
;
}
public
void
put
(
String
name
,
JarEntry
entry
,
ByteArrayOutputStream
stream
)
{
put
(
name
,
new
Pair
<>(
entry
,
stream
));
}
public
Manifest
getManifest
()
{
return
manifest
;
}
public
Enumeration
<
JarEntry
>
entries
()
{
Iterator
<
Map
.
Entry
<
String
,
Pair
<
JarEntry
,
ByteArrayOutputStream
>
>>
i
=
entrySet
().
iterator
();
ArrayList
<
JarEntry
>
list
=
new
ArrayList
<>();
while
(
i
.
hasNext
())
list
.
add
(
i
.
next
().
getValue
().
first
);
return
Collections
.
enumeration
(
list
);
}
int
total_size
=
temp
.
size
()
+
6
;
if
(
total_size
>
0xffff
)
{
throw
new
IllegalArgumentException
(
"signature is too big for ZIP file comment"
);
}
private
static
class
CMSSigner
implements
CMSTypedData
{
private
JarMap
inputJar
;
private
InputStream
publicKeyFile
;
private
X509Certificate
publicKey
;
private
PrivateKey
privateKey
;
private
OutputStream
outputStream
;
private
final
ASN1ObjectIdentifier
type
;
private
GeneralUtils
.
WholeFileSignerOutputStream
signer
;
public
CMSSigner
(
JarMap
inputJar
,
InputStream
publicKeyFile
,
X509Certificate
publicKey
,
PrivateKey
privateKey
,
OutputStream
outputStream
)
{
this
.
inputJar
=
inputJar
;
this
.
publicKeyFile
=
publicKeyFile
;
this
.
publicKey
=
publicKey
;
this
.
privateKey
=
privateKey
;
this
.
outputStream
=
outputStream
;
this
.
type
=
new
ASN1ObjectIdentifier
(
CMSObjectIdentifiers
.
data
.
getId
());
}
public
Object
getContent
()
{
// Not supported, but still don't crash or return null
return
1
;
}
public
ASN1ObjectIdentifier
getContentType
()
{
return
type
;
}
public
void
write
(
OutputStream
out
)
throws
IOException
{
try
{
signer
=
new
GeneralUtils
.
WholeFileSignerOutputStream
(
out
,
outputStream
);
JarOutputStream
outputJar
=
new
JarOutputStream
(
signer
);
int
hash
=
FileUtils
.
getDigestAlgorithm
(
publicKey
);
// Assume the certificate is valid for at least an hour.
long
timestamp
=
publicKey
.
getNotBefore
().
getTime
()
+
3600L
*
1000
;
Manifest
manifest
=
addDigestsToManifest
(
inputJar
,
hash
);
copyFiles
(
manifest
,
inputJar
,
outputJar
,
timestamp
);
// Don't add Otacert, it's not an OTA
// addOtacert(outputJar, publicKeyFile, timestamp, manifest, hash);
GeneralUtils
.
signFile
(
manifest
,
publicKey
,
privateKey
,
outputJar
);
signer
.
notifyClosing
();
outputJar
.
close
();
signer
.
finish
();
}
catch
(
Exception
e
)
{
throw
new
IOException
(
e
);
}
}
public
void
writeSignatureBlock
(
ByteArrayOutputStream
temp
)
throws
IOException
,
CertificateEncodingException
,
OperatorCreationException
,
CMSException
{
GeneralUtils
.
writeSignatureBlock
(
this
,
publicKey
,
privateKey
,
temp
);
}
public
GeneralUtils
.
WholeFileSignerOutputStream
getSigner
()
{
return
signer
;
            // signature starts this many bytes from the end of the file
            int signature_start = total_size - message.length - 1;
            temp.write(signature_start & 0xff);
            temp.write((signature_start >> 8) & 0xff);
            // Why the 0xff bytes? In a zip file with no archive comment,
            // bytes [-6:-2] of the file are the little-endian offset from
            // the start of the file to the central directory. So for the
            // two high bytes to be 0xff 0xff, the archive would have to
            // be nearly 4GB in size. So it's unlikely that a real
            // commentless archive would have 0xffs here, and lets us tell
            // an old signed archive from a new one.
            temp.write(0xff);
            temp.write(0xff);
            temp.write(total_size & 0xff);
            temp.write((total_size >> 8) & 0xff);
            temp.flush();

            // Signature verification checks that the EOCD header is the
            // last such sequence in the file (to avoid minzip finding a
            // fake EOCD appended after the signature in its scan). The
            // odds of producing this sequence by chance are very low, but
            // let's catch it here if it does.
            byte[] b = temp.toByteArray();
            for (int i = 0; i < b.length - 3; ++i) {
                if (b[i] == 0x50 && b[i + 1] == 0x4b && b[i + 2] == 0x05 && b[i + 3] == 0x06) {
                    throw new IllegalArgumentException("found spurious EOCD header at " + i);
                }
            }

            cmsFile.write(outputStream);
            outputStream.write(total_size & 0xff);
            outputStream.write((total_size >> 8) & 0xff);
            temp.writeTo(outputStream);
        }
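The comments above describe what gets appended as the archive comment: the signature block, followed by a 6-byte footer of [signature start offset, LE16] [0xff 0xff] [total comment size, LE16], where the offset is counted back from the end of the file. A minimal verifier-side sketch of that lookup (the method name and the in-memory approach are illustrative only):

import java.util.Arrays;

// Sketch: recover the whole-file signature block from the footer described above.
static byte[] extractWholeFileSignature(byte[] zip) {
    int len = zip.length;
    // Last two bytes: total size of the archive comment (little-endian)
    int commentSize = (zip[len - 2] & 0xff) | ((zip[len - 1] & 0xff) << 8);
    // The 0xff 0xff marker distinguishes a signed archive from a plain one
    if ((zip[len - 4] & 0xff) != 0xff || (zip[len - 3] & 0xff) != 0xff)
        throw new IllegalArgumentException("archive is not whole-file signed");
    // Offset of the signature block, counted back from the end of the file
    int signatureStart = (zip[len - 6] & 0xff) | ((zip[len - 5] & 0xff) << 8);
    if (signatureStart <= 6 || signatureStart > commentSize)
        throw new IllegalArgumentException("bad signature offset");
    // The signature block ends right before this 6-byte footer
    return Arrays.copyOfRange(zip, len - signatureStart, len - 6);
}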
    private static void signFile(Manifest manifest, JarFile inputJar,
                                 X509Certificate publicKey, PrivateKey privateKey,
                                 JarOutputStream outputJar) throws Exception {
        // Assume the certificate is valid for at least an hour.
        long timestamp = publicKey.getNotBefore().getTime() + 3600L * 1000;

        // MANIFEST.MF
        JarEntry je = new JarEntry(JarFile.MANIFEST_NAME);
        je.setTime(timestamp);
        outputJar.putNextEntry(je);
        manifest.write(outputJar);

        je = new JarEntry(CERT_SF_NAME);
        je.setTime(timestamp);
        outputJar.putNextEntry(je);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        writeSignatureFile(manifest, baos, getDigestAlgorithm(publicKey));
        byte[] signedData = baos.toByteArray();
        outputJar.write(signedData);

        // CERT.{EC,RSA} / CERT#.{EC,RSA}
        final String keyType = publicKey.getPublicKey().getAlgorithm();
        je = new JarEntry(String.format(CERT_SIG_NAME, keyType));
        je.setTime(timestamp);
        outputJar.putNextEntry(je);
        writeSignatureBlock(new CMSProcessableByteArray(signedData),
                publicKey, privateKey, outputJar);
    }
}
}
\ No newline at end of file
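The removed signFile above still shows the classic v1 jar-signing layout: MANIFEST.MF carries per-entry digests, CERT.SF digests the manifest, and CERT.{EC,RSA} holds the CMS signature over CERT.SF. The writeSignatureFile helper it calls is not in this hunk; the sketch below is a simplified illustration of what such a generator emits (the per-entry digests here are computed over a single-entry manifest serialization, which is not byte-exact to the jar spec, and SHA-256 is assumed):

import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.security.MessageDigest;
import java.util.Base64;
import java.util.Map;
import java.util.jar.Attributes;
import java.util.jar.Manifest;

static void writeSignatureFile(Manifest manifest, OutputStream out) throws Exception {
    Manifest sf = new Manifest();
    Attributes main = sf.getMainAttributes();
    main.putValue("Signature-Version", "1.0");

    MessageDigest md = MessageDigest.getInstance("SHA-256");
    ByteArrayOutputStream buf = new ByteArrayOutputStream();

    // Digest of the entire manifest
    manifest.write(buf);
    main.putValue("SHA-256-Digest-Manifest",
            Base64.getEncoder().encodeToString(md.digest(buf.toByteArray())));

    // One named section per entry, digesting that entry's manifest section
    for (Map.Entry<String, Attributes> e : manifest.getEntries().entrySet()) {
        buf.reset();
        Manifest single = new Manifest();
        single.getEntries().put(e.getKey(), e.getValue());
        single.write(buf);
        Attributes attr = new Attributes();
        attr.putValue("SHA-256-Digest",
                Base64.getEncoder().encodeToString(md.digest(buf.toByteArray())));
        sf.getEntries().put(e.getKey(), attr);
    }
    sf.write(out);
}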
app/src/main/jni/jni_glue.c
View file @
fd4aaab1
...
...
@@ -3,58 +3,17 @@
//
#include <jni.h>
#include <stdlib.h>
#include <unistd.h>
#include <fcntl.h>
#include "zipadjust.h"
JNIEXPORT jbyteArray JNICALL
Java_com_topjohnwu_magisk_utils_ZipUtils_zipAdjust___3BI(JNIEnv *env, jclass type, jbyteArray jbytes, jint size) {
    fin = (*env)->GetPrimitiveArrayCritical(env, jbytes, NULL);
    insize = (size_t) size;
    zipadjust(0);
    (*env)->ReleasePrimitiveArrayCritical(env, jbytes, fin, 0);
    jbyteArray ret = (*env)->NewByteArray(env, outsize);
    (*env)->SetByteArrayRegion(env, ret, 0, outsize, (const jbyte *) fout);
    free(fout);
    return ret;
}

JNIEXPORT void JNICALL
Java_com_topjohnwu_magisk_utils_ZipUtils_zipAdjust__Ljava_lang_String_2(JNIEnv *env, jclass type, jstring name) {
    const char *filename = (*env)->GetStringUTFChars(env, name, NULL);
    int fd = open(filename, O_RDONLY);
    if (fd < 0) return;
    // Load the file to memory
    insize = lseek(fd, 0, SEEK_END);
    lseek(fd, 0, SEEK_SET);
    fin = malloc(insize);
    read(fd, fin, insize);
    zipadjust(0);
    close(fd);
    // Open file for output
    fd = open(filename, O_WRONLY | O_TRUNC);
    if (fd < 0) return;
    (*env)->ReleaseStringUTFChars(env, name, filename);
    // Write back to file
    lseek(fd, 0, SEEK_SET);
    write(fd, fout, outsize);
Java_com_topjohnwu_magisk_utils_ZipUtils_zipAdjust(JNIEnv *env, jclass type, jstring filenameIn_, jstring filenameOut_) {
    const char *filenameIn = (*env)->GetStringUTFChars(env, filenameIn_, 0);
    const char *filenameOut = (*env)->GetStringUTFChars(env, filenameOut_, 0);
    close(fd);
    free(fin);
    free(fout);
    // TODO
    zipadjust(filenameIn, filenameOut, 0);
}
    (*env)->ReleaseStringUTFChars(env, filenameIn_, filenameIn);
    (*env)->ReleaseStringUTFChars(env, filenameOut_, filenameOut);
}
\ No newline at end of file
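By JNI naming convention, the new Java_com_topjohnwu_magisk_utils_ZipUtils_zipAdjust entry point corresponds to a single static native method on com.topjohnwu.magisk.utils.ZipUtils that takes the input and output paths, which is how ProcessRepoZip invokes it. A sketch of the Java-side declaration (the System.loadLibrary name is an assumption; the real module name comes from the NDK build script):

package com.topjohnwu.magisk.utils;

public class ZipUtils {
    static {
        // Assumed library name; not shown in this diff
        System.loadLibrary("zipadjust");
    }

    // Maps to Java_com_topjohnwu_magisk_utils_ZipUtils_zipAdjust(JNIEnv*, jclass, jstring, jstring)
    public static native void zipAdjust(String filenameIn, String filenameOut);
}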
app/src/main/jni/zipadjust.c
View file @
fd4aaab1
#include <stdlib.h>
#include <fcntl.h>
#include <string.h>
#include <zlib.h>
#include <unistd.h>
#include "zipadjust.h"
size_t insize = 0, outsize = 0, alloc = 0;
unsigned char *fin = NULL, *fout = NULL;
#ifndef O_BINARY
#define O_BINARY 0
#define O_TEXT 0
#endif
#pragma pack(1)
struct local_header_struct {
...
...
@@ -81,41 +86,43 @@ static int xerror(char* message) {
    return 0;
}

static int xseekread(off_t offset, void *buf, size_t bytes) {
    memcpy(buf, fin + offset, bytes);
static int xseekread(int fd, off_t offset, void *buf, size_t bytes) {
    if (lseek(fd, offset, SEEK_SET) == (off_t) -1) return xerror("Seek failed");
    if (read(fd, buf, bytes) != bytes) return xerror("Read failed");
    return 1;
}

static int xseekwrite(off_t offset, const void *buf, size_t bytes) {
    if (offset + bytes > outsize) outsize = offset + bytes;
    if (outsize > alloc) {
        fout = realloc(fout, outsize);
        alloc = outsize;
    }
    memcpy(fout + offset, buf, bytes);
static int xseekwrite(int fd, off_t offset, void *buf, size_t bytes) {
    if (lseek(fd, offset, SEEK_SET) == (off_t) -1) return xerror("Seek failed");
    if (write(fd, buf, bytes) != bytes) return xerror("Write failed");
    return 1;
}

static int xfilecopy(off_t offsetIn, off_t offsetOut, size_t bytes) {
    unsigned int CHUNK = 256 * 1024;
    unsigned char *buf = malloc(CHUNK);
static int xfilecopy(int fdIn, int fdOut, off_t offsetIn, off_t offsetOut, size_t bytes) {
    if ((offsetIn != (off_t) -1) && (lseek(fdIn, offsetIn, SEEK_SET) == (off_t) -1)) return xerror("Seek failed");
    if ((offsetOut != (off_t) -1) && (lseek(fdOut, offsetOut, SEEK_SET) == (off_t) -1)) return xerror("Seek failed");
    int CHUNK = 256 * 1024;
    void *buf = malloc(CHUNK);
    if (buf == NULL) return xerror("malloc failed");
    size_t left = bytes;
    while (left > 0) {
        size_t wanted = (left < CHUNK) ? left : CHUNK;
        xseekread(offsetIn, buf, wanted);
        xseekwrite(offsetOut, buf, wanted);
        offsetIn += wanted;
        offsetOut += wanted;
        left -= wanted;
        size_t r = read(fdIn, buf, wanted);
        if (r <= 0) return xerror("Read failed");
        if (write(fdOut, buf, r) != r) return xerror("Write failed");
        left -= r;
    }
    free(buf);
    return 1;
}

static int xdecompress(off_t offsetIn, off_t offsetOut, size_t bytes) {
    unsigned int CHUNK = 256 * 1024;
static int xdecompress(int fdIn, int fdOut, off_t offsetIn, off_t offsetOut, size_t bytes) {
    if ((offsetIn != (off_t) -1) && (lseek(fdIn, offsetIn, SEEK_SET) == (off_t) -1)) return xerror("Seek failed");
    if ((offsetOut != (off_t) -1) && (lseek(fdOut, offsetOut, SEEK_SET) == (off_t) -1)) return xerror("Seek failed");
    int CHUNK = 256 * 1024;
    int ret;
    unsigned have;
...
...
@@ -132,12 +139,9 @@ static int xdecompress(off_t offsetIn, off_t offsetOut, size_t bytes) {
    if (ret != Z_OK) return xerror("ret != Z_OK");
    do {
        strm.avail_in = insize - offsetIn;
        strm.avail_in = read(fdIn, in, CHUNK);
        if (strm.avail_in == 0) break;
        strm.avail_in = (strm.avail_in > CHUNK) ? CHUNK : strm.avail_in;
        xseekread(offsetIn, in, strm.avail_in);
        strm.next_in = in;
        offsetIn += strm.avail_in;
        do {
            strm.avail_out = CHUNK;
...
...
@@ -155,8 +159,10 @@ static int xdecompress(off_t offsetIn, off_t offsetOut, size_t bytes) {
            }
            have = CHUNK - strm.avail_out;
            xseekwrite(offsetOut, out, have);
            offsetOut += have;
            if (write(fdOut, out, have) != have) {
                (void) inflateEnd(&strm);
                return xerror("Write failed");
            }
        } while (strm.avail_out == 0);
    } while (ret != Z_STREAM_END);
    (void) inflateEnd(&strm);
...
...
@@ -164,118 +170,128 @@ static int xdecompress(off_t offsetIn, off_t offsetOut, size_t bytes) {
    return ret == Z_STREAM_END ? 1 : 0;
}

int zipadjust(int decompress) {
int zipadjust(const char *filenameIn, const char *filenameOut, int decompress) {
    int ok = 0;
    char filename[1024];
    central_footer_t central_footer;
    uint32_t central_directory_in_position = 0;
    uint32_t central_directory_in_size = 0;
    uint32_t central_directory_out_size = 0;
    int i;
    for (i = insize - 4; i >= 0; i--) {
        uint32_t magic = 0;
        if (!xseekread(i, &magic, sizeof(uint32_t))) return 0;
        if (magic == MAGIC_CENTRAL_FOOTER) {
            LOGD("central footer @ %08X\n", i);
            if (!xseekread(i, &central_footer, sizeof(central_footer_t))) return 0;
            central_header_t central_header;
            if (!xseekread(central_footer.central_directory_offset, &central_header, sizeof(central_header_t))) return 0;
            if (central_header.signature == MAGIC_CENTRAL_HEADER) {
                central_directory_in_position = central_footer.central_directory_offset;
                central_directory_in_size = insize - central_footer.central_directory_offset;
                LOGD("central header @ %08X (%d)\n", central_footer.central_directory_offset, central_footer.central_directory_size);
                break;
    int fin = open(filenameIn, O_RDONLY | O_BINARY);
    if (fin > 0) {
        unsigned int size = lseek(fin, 0, SEEK_END);
        lseek(fin, 0, SEEK_SET);
        LOGD("%d bytes\n", size);
        char filename[1024];
        central_footer_t central_footer;
        uint32_t central_directory_in_position = 0;
        uint32_t central_directory_in_size = 0;
        uint32_t central_directory_out_size = 0;
        int i;
        for (i = size - 4; i >= 0; i--) {
            uint32_t magic = 0;
            if (!xseekread(fin, i, &magic, sizeof(uint32_t))) return 0;
            if (magic == MAGIC_CENTRAL_FOOTER) {
                LOGD("central footer @ %08X\n", i);
                if (!xseekread(fin, i, &central_footer, sizeof(central_footer_t))) return 0;
                central_header_t central_header;
                if (!xseekread(fin, central_footer.central_directory_offset, &central_header, sizeof(central_header_t))) return 0;
                if (central_header.signature == MAGIC_CENTRAL_HEADER) {
                    central_directory_in_position = central_footer.central_directory_offset;
                    central_directory_in_size = size - central_footer.central_directory_offset;
                    LOGD("central header @ %08X (%d)\n", central_footer.central_directory_offset, central_footer.central_directory_size);
                    break;
                }
            }
        }
    }
    if (central_directory_in_position == 0) return 0;

    unsigned char *central_directory_in = (unsigned char *) malloc(central_directory_in_size);
    unsigned char *central_directory_out = (unsigned char *) malloc(central_directory_in_size);
    if (!xseekread(central_directory_in_position, central_directory_in, central_directory_in_size)) return 0;
    memset(central_directory_out, 0, central_directory_in_size);
    fout = (unsigned char *) malloc(insize);
    alloc = insize;

    uintptr_t central_directory_in_index = 0;
    uintptr_t central_directory_out_index = 0;
    central_header_t *central_header = NULL;
    uint32_t out_index = 0;

    while (1) {
        central_header = (central_header_t *) &central_directory_in[central_directory_in_index];
        if (central_header->signature != MAGIC_CENTRAL_HEADER) break;
        filename[central_header->length_filename] = (char) 0;
        memcpy(filename, &central_directory_in[central_directory_in_index + sizeof(central_header_t)], central_header->length_filename);
        LOGD("%s (%d --> %d) [%08X] (%d)\n", filename, central_header->size_uncompressed, central_header->size_compressed, central_header->crc32, central_header->length_extra + central_header->length_comment);
        if (central_directory_in_position == 0) return 0;

        unsigned char *central_directory_in = (unsigned char *) malloc(central_directory_in_size);
        unsigned char *central_directory_out = (unsigned char *) malloc(central_directory_in_size);
        if (!xseekread(fin, central_directory_in_position, central_directory_in, central_directory_in_size)) return 0;
        memset(central_directory_out, 0, central_directory_in_size);

        unlink(filenameOut);
        int fout = open(filenameOut, O_CREAT | O_WRONLY | O_BINARY, 0644);
        if (fout > 0) {
            uintptr_t central_directory_in_index = 0;
            uintptr_t central_directory_out_index = 0;
            central_header_t *central_header = NULL;
            uint32_t out_index = 0;

            while (1) {
                central_header = (central_header_t *) &central_directory_in[central_directory_in_index];
                if (central_header->signature != MAGIC_CENTRAL_HEADER) break;
                filename[central_header->length_filename] = (char) 0;
                memcpy(filename, &central_directory_in[central_directory_in_index + sizeof(central_header_t)], central_header->length_filename);
                LOGD("%s (%d --> %d) [%08X] (%d)\n", filename, central_header->size_uncompressed, central_header->size_compressed, central_header->crc32, central_header->length_extra + central_header->length_comment);

                local_header_t local_header;
                if (!xseekread(fin, central_header->offset, &local_header, sizeof(local_header_t))) return 0;

                // save and update to next index before we clobber the data
                uint16_t compression_method_old = central_header->compression_method;
                uint32_t size_compressed_old = central_header->size_compressed;
                uint32_t offset_old = central_header->offset;
                uint32_t length_extra_old = central_header->length_extra;
                central_directory_in_index += sizeof(central_header_t) + central_header->length_filename + central_header->length_extra + central_header->length_comment;

                // copying, rewriting, and correcting local and central headers so all the information matches, and no data descriptors are necessary
                central_header->offset = out_index;
                central_header->flags = central_header->flags & !8;
                if (decompress && (compression_method_old == 8)) {
                    central_header->compression_method = 0;
                    central_header->size_compressed = central_header->size_uncompressed;
                }
                central_header->length_extra = 0;
                central_header->length_comment = 0;
                local_header.compression_method = central_header->compression_method;
                local_header.flags = central_header->flags;
                local_header.crc32 = central_header->crc32;
                local_header.size_uncompressed = central_header->size_uncompressed;
                local_header.size_compressed = central_header->size_compressed;
                local_header.length_extra = 0;
                if (!xseekwrite(fout, out_index, &local_header, sizeof(local_header_t))) return 0;
                out_index += sizeof(local_header_t);
                if (!xseekwrite(fout, out_index, &filename[0], central_header->length_filename)) return 0;
                out_index += central_header->length_filename;
                if (decompress && (compression_method_old == 8)) {
                    if (!xdecompress(fin, fout, offset_old + sizeof(local_header_t) + central_header->length_filename + length_extra_old, out_index, size_compressed_old)) return 0;
                } else {
                    if (!xfilecopy(fin, fout, offset_old + sizeof(local_header_t) + central_header->length_filename + length_extra_old, out_index, size_compressed_old)) return 0;
                }
                out_index += local_header.size_compressed;
                memcpy(&central_directory_out[central_directory_out_index], central_header, sizeof(central_header_t) + central_header->length_filename);
                central_directory_out_index += sizeof(central_header_t) + central_header->length_filename;
            }
            local_header_t local_header;
            if (!xseekread(central_header->offset, &local_header, sizeof(local_header_t))) return 0;

            central_directory_out_size = central_directory_out_index;
            central_footer.central_directory_size = central_directory_out_size;
            central_footer.central_directory_offset = out_index;
            central_footer.length_comment = 0;
            if (!xseekwrite(fout, out_index, central_directory_out, central_directory_out_size)) return 0;
            out_index += central_directory_out_size;
            if (!xseekwrite(fout, out_index, &central_footer, sizeof(central_footer_t))) return 0;

            // save and update to next index before we clobber the data
            uint16_t compression_method_old = central_header->compression_method;
            uint32_t size_compressed_old = central_header->size_compressed;
            uint32_t offset_old = central_header->offset;
            uint32_t length_extra_old = central_header->length_extra;
            central_directory_in_index += sizeof(central_header_t) + central_header->length_filename + central_header->length_extra + central_header->length_comment;
            LOGD("central header @ %08X (%d)\n", central_footer.central_directory_offset, central_footer.central_directory_size);
            LOGD("central footer @ %08X\n", out_index);

            // copying, rewriting, and correcting local and central headers so all the information matches, and no data descriptors are necessary
            central_header->offset = out_index;
            central_header->flags = central_header->flags & !8;
            if (decompress && (compression_method_old == 8)) {
                central_header->compression_method = 0;
                central_header->size_compressed = central_header->size_uncompressed;
                close(fout);
                ok = 1;
            }
            central_header->length_extra = 0;
            central_header->length_comment = 0;
            local_header.compression_method = central_header->compression_method;
            local_header.flags = central_header->flags;
            local_header.crc32 = central_header->crc32;
            local_header.size_uncompressed = central_header->size_uncompressed;
            local_header.size_compressed = central_header->size_compressed;
            local_header.length_extra = 0;
            if (!xseekwrite(out_index, &local_header, sizeof(local_header_t))) return 0;
            out_index += sizeof(local_header_t);
            if (!xseekwrite(out_index, &filename[0], central_header->length_filename)) return 0;
            out_index += central_header->length_filename;
            if (decompress && (compression_method_old == 8)) {
                if (!xdecompress(offset_old + sizeof(local_header_t) + central_header->length_filename + length_extra_old, out_index, size_compressed_old)) return 0;
            } else {
                if (!xfilecopy(offset_old + sizeof(local_header_t) + central_header->length_filename + length_extra_old, out_index, size_compressed_old)) return 0;
            }
            out_index += local_header.size_compressed;
            memcpy(&central_directory_out[central_directory_out_index], central_header, sizeof(central_header_t) + central_header->length_filename);
            central_directory_out_index += sizeof(central_header_t) + central_header->length_filename;
            free(central_directory_in);
            free(central_directory_out);
            close(fin);
        }

        central_directory_out_size = central_directory_out_index;
        central_footer.central_directory_size = central_directory_out_size;
        central_footer.central_directory_offset = out_index;
        central_footer.length_comment = 0;
        if (!xseekwrite(out_index, central_directory_out, central_directory_out_size)) return 0;
        out_index += central_directory_out_size;
        if (!xseekwrite(out_index, &central_footer, sizeof(central_footer_t))) return 0;
        LOGD("central header @ %08X (%d)\n", central_footer.central_directory_offset, central_footer.central_directory_size);
        LOGD("central footer @ %08X\n", out_index);
        ok = 1;
        free(central_directory_in);
        free(central_directory_out);
    return ok;
}
\ No newline at end of file
}
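The rewriting loop above manipulates the standard ZIP records through packed structs whose definitions are elided from this diff (the local_header_struct declaration near the top of the file). As an illustration of the on-disk layout those fields refer to, a typical packed definition looks like the following; struct and field names mirror the code, but this is the generic ZIP format from the PKWARE APPNOTE, not the file's actual declarations:

#include <stdint.h>

#pragma pack(1)
typedef struct {
    uint32_t signature;            // 0x04034b50, local file header
    uint16_t version_needed;
    uint16_t flags;
    uint16_t compression_method;   // 0 = stored, 8 = deflate
    uint16_t modification_time;
    uint16_t modification_date;
    uint32_t crc32;
    uint32_t size_compressed;
    uint32_t size_uncompressed;
    uint16_t length_filename;
    uint16_t length_extra;
} local_header_t;

typedef struct {
    uint32_t signature;            // 0x02014b50, central directory header (MAGIC_CENTRAL_HEADER)
    uint16_t version_made_by;
    uint16_t version_needed;
    uint16_t flags;
    uint16_t compression_method;
    uint16_t modification_time;
    uint16_t modification_date;
    uint32_t crc32;
    uint32_t size_compressed;
    uint32_t size_uncompressed;
    uint16_t length_filename;
    uint16_t length_extra;
    uint16_t length_comment;
    uint16_t disk_number_start;
    uint16_t internal_attributes;
    uint32_t external_attributes;
    uint32_t offset;               // offset of the matching local header
} central_header_t;

typedef struct {
    uint32_t signature;            // 0x06054b50, end of central directory (MAGIC_CENTRAL_FOOTER)
    uint16_t disk_number;
    uint16_t disk_with_central_directory;
    uint16_t entries_on_disk;
    uint16_t entries_total;
    uint32_t central_directory_size;
    uint32_t central_directory_offset;
    uint16_t length_comment;
} central_footer_t;
#pragma pack()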
app/src/main/jni/zipadjust.h
View file @
fd4aaab1
...
...
@@ -3,10 +3,7 @@
#include <android/log.h>
int zipadjust(int decompress);

extern size_t insize, outsize, alloc;
extern unsigned char *fin, *fout;
int zipadjust(const char *filenameIn, const char *filenameOut, int decompress);
#define LOG_TAG "zipadjust"
...
...
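zipadjust.c logs through a LOGD macro built on <android/log.h> and the LOG_TAG defined above; its definition sits in the elided part of this header, but it is presumably the usual Android logging pattern, roughly:

// Assumed definition; the actual macro in zipadjust.h is not shown in this diff
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)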