Magisk / Commits

Commit fd4aaab1
Authored Jun 15, 2017 by topjohnwu
Rewrite zip signing

Parent: 42d14d5c
Showing 6 changed files with 593 additions and 1079 deletions
ProcessRepoZip.java   app/src/main/java/com/topjohnwu/magisk/asyncs/ProcessRepoZip.java     +12  -31
ManagerUpdate.java    app/src/main/java/com/topjohnwu/magisk/receivers/ManagerUpdate.java    +0  -1
ZipUtils.java         app/src/main/java/com/topjohnwu/magisk/utils/ZipUtils.java           +426  -864
jni_glue.c            app/src/main/jni/jni_glue.c                                            +9  -50
zipadjust.c           app/src/main/jni/zipadjust.c                                         +145  -129
zipadjust.h           app/src/main/jni/zipadjust.h                                           +1  -4
app/src/main/java/com/topjohnwu/magisk/asyncs/ProcessRepoZip.java

@@ -14,7 +14,6 @@ import com.topjohnwu.magisk.utils.ZipUtils;
  import java.io.File;
  import java.io.FileInputStream;
  import java.io.FileNotFoundException;
- import java.io.FileOutputStream;
  import java.io.OutputStream;

  public class ProcessRepoZip extends ParallelTask<Void, Void, Boolean> {

@@ -38,51 +37,33 @@ public class ProcessRepoZip extends ParallelTask<Void, Void, Boolean> {
      @Override
      protected Boolean doInBackground(Void... params) {
-         FileInputStream in;
-         FileOutputStream out;
          try {
              // Create temp file
              File temp1 = new File(magiskManager.getCacheDir(), "1.zip");
              File temp2 = new File(magiskManager.getCacheDir(), "2.zip");
-             if (magiskManager.getCacheDir().mkdirs()) {
-                 temp1.createNewFile();
-                 temp2.createNewFile();
-             }
-             out = new FileOutputStream(temp1);
+             magiskManager.getCacheDir().mkdirs();
+             temp1.createNewFile();
+             temp2.createNewFile();
              // First remove top folder in Github source zip, Uri -> temp1
-             ZipUtils.removeTopFolder(activity.getContentResolver().openInputStream(mUri), out);
-             out.flush();
-             out.close();
-             out = new FileOutputStream(temp2);
+             ZipUtils.removeTopFolder(activity.getContentResolver().openInputStream(mUri), temp1);
              // Then sign the zip for the first time, temp1 -> temp2
-             ZipUtils.signZip(activity, temp1, out, false);
-             out.flush();
-             out.close();
-             // Adjust the zip to prevent unzip issues, temp2 -> temp2
-             ZipUtils.adjustZip(temp2);
-             out = new FileOutputStream(temp1);
-             // Finally, sign the whole zip file again, temp2 -> temp1
-             ZipUtils.signZip(activity, temp2, out, true);
-             out.flush();
-             out.close();
-             in = new FileInputStream(temp1);
-             // Write it back to the downloaded zip, temp1 -> Uri
+             ZipUtils.signZip(activity, temp1, temp2, false);
+             // Adjust the zip to prevent unzip issues, temp2 -> temp1
+             ZipUtils.zipAdjust(temp2.getPath(), temp1.getPath());
+             // Finally, sign the whole zip file again, temp1 -> temp2
+             ZipUtils.signZip(activity, temp1, temp2, true);
+             // Write it back to the downloaded zip, temp2 -> Uri
+             FileInputStream in = new FileInputStream(temp2);
              try (OutputStream target = activity.getContentResolver().openOutputStream(mUri)) {
                  byte[] buffer = new byte[4096];
                  int length;
                  if (target == null) throw new FileNotFoundException();
                  while ((length = in.read(buffer)) > 0)
                      target.write(buffer, 0, length);
              }
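The hunk above only shows that removeTopFolder() now writes into a File instead of an OutputStream (its new signature appears again in the ZipUtils diff below). A minimal standalone sketch of what such a helper plausibly does — strip the leading GitHub "<repo>-<branch>/" folder from every entry while copying the archive — is given here; the class name and the exact skipping rules are assumptions, not the actual Magisk implementation.

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.jar.JarEntry;
    import java.util.jar.JarInputStream;
    import java.util.jar.JarOutputStream;

    public class TopFolderStripper {
        // Copy "in" to "output", dropping the first path segment of every entry.
        public static void removeTopFolder(InputStream in, File output) throws IOException {
            byte[] buffer = new byte[4096];
            try (JarInputStream source = new JarInputStream(in);
                 JarOutputStream dest = new JarOutputStream(new FileOutputStream(output))) {
                JarEntry entry;
                int len;
                while ((entry = source.getNextJarEntry()) != null) {
                    if (entry.isDirectory())
                        continue;                                   // only copy real files
                    String name = entry.getName();
                    int slash = name.indexOf('/');
                    // "Magisk-master/README.md" -> "README.md"
                    String stripped = slash >= 0 ? name.substring(slash + 1) : name;
                    if (stripped.isEmpty())
                        continue;
                    dest.putNextEntry(new JarEntry(stripped));
                    while ((len = source.read(buffer)) > 0)
                        dest.write(buffer, 0, len);
                }
            }
        }
    }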
app/src/main/java/com/topjohnwu/magisk/receivers/ManagerUpdate.java

@@ -7,7 +7,6 @@ import android.net.Uri;
  import android.os.Build;
  import android.support.v4.content.FileProvider;
- import com.topjohnwu.magisk.MagiskManager;
  import com.topjohnwu.magisk.utils.Utils;
  import java.io.File;
app/src/main/java/com/topjohnwu/magisk/utils/ZipUtils.java

This file is rewritten around files on disk instead of in-memory streams. The original page renders it side by side and the flattened hunks are summarized below.

  package com.topjohnwu.magisk.utils;

Import hunks (@@ -21,8 +21,10 @@, @@ -31,24 +33,19 @@ and @@ -60,50 +57,35 @@):
- import android.util.Pair;
- import java.security.Key;
- import java.security.NoSuchAlgorithmException;
- import java.security.spec.InvalidKeySpecException;
- import java.security.spec.KeySpec;
- import java.util.Iterator;
- import javax.crypto.Cipher;
- import javax.crypto.EncryptedPrivateKeyInfo;
- import javax.crypto.SecretKeyFactory;
- import javax.crypto.spec.PBEKeySpec;
+ import org.spongycastle.asn1.pkcs.PrivateKeyInfo;
+ import java.io.ByteArrayInputStream;
+ import java.io.FileInputStream;
  (the android.content.Context, Spongy Castle ASN.1/CMS/operator, java.io, java.security, java.util and java.util.jar imports are unchanged)

A file header comment is added:
+ /*
+ * Modified from from AOSP(Marshmallow) SignAPK.java
+ * */
  public class ZipUtils {
The constants are reordered and OTACERT_NAME is dropped; the new layout:
+     // File name in assets
      private static final String PUBLIC_KEY_NAME = "public.certificate.x509.pem";
      private static final String PRIVATE_KEY_NAME = "private.key.pk8";
      private static final String CERT_SF_NAME = "META-INF/CERT.SF";
      private static final String CERT_SIG_NAME = "META-INF/CERT.%s";
-     private static final String OTACERT_NAME = "META-INF/com/android/otacert";
      private static Provider sBouncyCastleProvider;
      // bitmasks for which hash algorithms we need the manifest to include.
      private static final int USE_SHA1 = 1;
      private static final int USE_SHA256 = 2;

The stripPattern regular expression (which excludes META-INF/*.SF|RSA|DSA|EC, com/android/otacert and the manifest from the copied output) is kept in both versions. The static initializer keeps loading the native zipadjust library and registering the Spongy Castle provider; only the position of the loadLibrary call changes:
      static {
          System.loadLibrary("zipadjust");
          sBouncyCastleProvider = new BouncyCastleProvider();
          Security.insertProviderAt(sBouncyCastleProvider, 1);
      }

The JNI surface shrinks from two native methods plus two Java wrappers to a single file-to-file call:
-     public native static byte[] zipAdjust(byte[] bytes, int size);
-     public native static void zipAdjust(String filename);
+     public native static void zipAdjust(String filenameIn, String filenameOut);

-     // Wrapper function for the JNI function
-     public static void adjustZip(ByteArrayInOutStream buffer) {
-         buffer.setBuffer(zipAdjust(buffer.toByteArray(), buffer.size()));
-     }
-     public static void adjustZip(File file) {
-         zipAdjust(file.getPath());
-     }
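A compact view of the surviving native entry point, pulled out of the hunk above; the wrapper class name and the main() harness are illustrative only, and the only behavioral claim is the one the ProcessRepoZip comment makes ("adjust the zip to prevent unzip issues").

    public class ZipAdjustNative {
        static {
            // libzipadjust.so is built from app/src/main/jni/zipadjust.c,
            // also touched by this commit.
            System.loadLibrary("zipadjust");
        }

        // Reads filenameIn and writes the adjusted archive to filenameOut,
        // "to prevent unzip issues" before the final whole-file signing pass.
        public static native void zipAdjust(String filenameIn, String filenameOut);

        public static void main(String[] args) {
            zipAdjust(args[0], args[1]);   // e.g. "2.zip" -> "1.zip" as in ProcessRepoZip
        }
    }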
removeTopFolder() now writes straight into a file:
-     public static void removeTopFolder(InputStream in, OutputStream out) throws IOException {
+     public static void removeTopFolder(InputStream in, File output) throws IOException {
          try {
              JarInputStream source = new JarInputStream(in);
-             JarOutputStream dest = new JarOutputStream(out);
+             JarOutputStream dest = new JarOutputStream(new FileOutputStream(output));
              JarEntry entry;
              String path;
              int size;
              ...

Hunks @@ -133,14 +115,6 @@ and @@ -163,7 +137,7 @@ trim the unzip helpers: the two-argument convenience wrappers unzip(File, File) and unzip(InputStream, File) are removed, unzip(File file, File folder, String path) is kept, and its copy loop no longer passes an explicit offset and length:
              out = new FileOutputStream(dest);
-             while ((count = is.read(data, 0, 4096)) != -1) {
+             while ((count = is.read(data)) != -1) {
                  out.write(data, 0, count);
              }
              out.flush();
Hunk @@ -176,95 +150,27 @@ removes the stream-based extractor and both old signing entry points:
-     public static void unzip(InputStream file, File folder, String path) throws Exception { ... }   // JarInputStream loop, Logger.dev("ZipUtils: Extracting: " + entry)
-     public static void signZip(Context context, File input, OutputStream outputStream, boolean signWholeFile) throws Exception { ... }   // JarFile + GeneralUtils/FileUtils helpers
-     public static void signZip(Context context, InputStream inputStream, OutputStream outputStream, boolean signWholeFile) throws Exception { ... }   // StreamUtils.JarMap built from a JarInputStream

They are replaced by a single file-to-file entry point (reconstructed from the right-hand column of the diff):
+     public static void signZip(Context context, File input, File output, boolean minSign) {
+         int alignment = 4;
+         JarFile inputJar = null;
+         FileOutputStream outputFile = null;
+         int hashes = 0;
+         try {
+             X509Certificate publicKey = readPublicKey(context.getAssets().open(PUBLIC_KEY_NAME));
+             hashes |= getDigestAlgorithm(publicKey);
+             // Set the ZIP file timestamp to the starting valid time
+             // of the 0th certificate plus one hour (to match what
+             // we've historically done).
+             long timestamp = publicKey.getNotBefore().getTime() + 3600L * 1000;
+             PrivateKey privateKey = readPrivateKey(context.getAssets().open(PRIVATE_KEY_NAME));
+             outputFile = new FileOutputStream(output);
+             if (minSign) {
+                 ZipUtils.signWholeFile(input, publicKey, privateKey, outputFile);
+             } else {
+                 inputJar = new JarFile(input, false);  // Don't verify.
+                 JarOutputStream outputJar = new JarOutputStream(outputFile);
+                 // For signing .apks, use the maximum compression to make
+                 // them as small as possible (since they live forever on
+                 // the system partition). For OTA packages, use the
+                 // default compression level, which is much much faster
+                 // and produces output that is only a tiny bit larger
+                 // (~0.1% on full OTA packages I tested).
+                 outputJar.setLevel(9);
+                 Manifest manifest = addDigestsToManifest(inputJar, hashes);
+                 copyFiles(manifest, inputJar, outputJar, timestamp, alignment);
+                 signFile(manifest, inputJar, publicKey, privateKey, outputJar);
+                 outputJar.close();
+             }
+         } catch (Exception e) {
+             e.printStackTrace();
+         } finally {
+             try {
+                 if (inputJar != null) inputJar.close();
+                 if (outputFile != null) outputFile.close();
+             } catch (IOException e) {
+                 e.printStackTrace();
+             }
+         }
+     }

The old "Cannot sign OTA packages with non-RSA keys" guard and the rethrow of the caught exception are dropped along with the old entry points.
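For orientation, a small standalone way to see what the non-minSign branch produces: a jar-signed archive whose META-INF carries the manifest, the .SF digest file and the CERT.<keyalg> signature block (per CERT_SF_NAME and CERT_SIG_NAME above). The zip path argument is a placeholder.

    import java.util.Enumeration;
    import java.util.jar.JarEntry;
    import java.util.jar.JarFile;

    public class ListSignatureEntries {
        public static void main(String[] args) throws Exception {
            try (JarFile jar = new JarFile(args[0], false)) {   // don't verify, just list
                Enumeration<JarEntry> entries = jar.entries();
                while (entries.hasMoreElements()) {
                    String name = entries.nextElement().getName();
                    if (name.startsWith("META-INF/"))
                        System.out.println(name);   // expect MANIFEST.MF, CERT.SF, CERT.RSA
                }
            }
        }
    }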
The nested GeneralUtils class is removed; its helpers become private static members of ZipUtils. The recoverable new-side pieces:

+     /**
+      * Return one of USE_SHA1 or USE_SHA256 according to the signature
+      * algorithm specified in the cert.
+      */
+     private static int getDigestAlgorithm(X509Certificate cert) {
+         String sigAlg = cert.getSigAlgName().toUpperCase(Locale.US);
+         if ("SHA1WITHRSA".equals(sigAlg) || "MD5WITHRSA".equals(sigAlg)) {
+             // see "HISTORICAL NOTE" above.
+             return USE_SHA1;
+         } else if (sigAlg.startsWith("SHA256WITH")) {
+             return USE_SHA256;
+         } else {
+             throw new IllegalArgumentException("unsupported signature algorithm \"" + sigAlg +
+                     "\" in cert [" + cert.getSubjectDN());
+         }
+     }

getSignatureAlgorithm(X509Certificate) keeps the RSA branch (SHA256withRSA / SHA1withRSA) and the EC branch (SHA256withECDSA) but drops the old DSA branch (SHA256withDSA). readPublicKey(InputStream) keeps its CertificateFactory("X.509") body and simply moves out of GeneralUtils. readPrivateKey(InputStream) no longer tries RSA, DSA and EC key factories in turn — the decodeAsKeyType helper with its "Must be an RSA, DSA, or EC key" NoSuchAlgorithmException is removed, as is the unused password-based decryptPrivateKey built on javax.crypto — and instead reads the algorithm OID out of the PKCS#8 structure:

+     /** Read a PKCS#8 format private key. */
+     private static PrivateKey readPrivateKey(InputStream input)
+             throws IOException, GeneralSecurityException {
+         try {
+             byte[] buffer = new byte[4096];
+             int size = input.read(buffer);
+             byte[] bytes = Arrays.copyOf(buffer, size);
+             PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(bytes);
+             /*
+              * Now it's in a PKCS#8 PrivateKeyInfo structure. Read its Algorithm
+              * OID and use that to construct a KeyFactory.
+              */
+             ASN1InputStream bIn = new ASN1InputStream(new ByteArrayInputStream(spec.getEncoded()));
+             PrivateKeyInfo pki = PrivateKeyInfo.getInstance(bIn.readObject());
+             String algOid = pki.getPrivateKeyAlgorithm().getAlgorithm().getId();
+             return KeyFactory.getInstance(algOid).generatePrivate(spec);
+         } finally {
+             input.close();
+         }
+     }
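The same OID-driven PKCS#8 reading can be exercised off-device. A sketch assuming the Spongy Castle classes imported above are on the classpath (Bouncy Castle works the same way with the package prefix swapped) and a placeholder key path; the OID lookup in KeyFactory relies on a provider that registers the OID alias, which the app guarantees by installing Spongy Castle.

    import org.spongycastle.asn1.ASN1InputStream;
    import org.spongycastle.asn1.pkcs.PrivateKeyInfo;
    import java.io.ByteArrayInputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.security.KeyFactory;
    import java.security.PrivateKey;
    import java.security.spec.PKCS8EncodedKeySpec;

    public class Pkcs8Reader {
        static PrivateKey readPrivateKey(byte[] bytes) throws Exception {
            PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(bytes);
            // Ask the PrivateKeyInfo structure which algorithm the key uses,
            // instead of trying RSA, DSA and EC factories one after another.
            try (ASN1InputStream in = new ASN1InputStream(new ByteArrayInputStream(spec.getEncoded()))) {
                PrivateKeyInfo pki = PrivateKeyInfo.getInstance(in.readObject());
                String algOid = pki.getPrivateKeyAlgorithm().getAlgorithm().getId();
                return KeyFactory.getInstance(algOid).generatePrivate(spec);
            }
        }

        public static void main(String[] args) throws Exception {
            byte[] raw = Files.readAllBytes(Paths.get(args[0]));   // e.g. a private.key.pk8 file
            System.out.println(readPrivateKey(raw).getAlgorithm());
        }
    }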
Hunk @@ -272,794 +178,450 @@ rewrites the signing internals. The remaining AOSP SignApk machinery is flattened into ZipUtils and adapted to the JarFile input:

  - addDigestsToManifest(JarFile jar, int hashes) copies or creates the main attributes (Manifest-Version 1.0, Created-By "1.0 (Android SignApk)"), walks the entries in sorted (TreeMap) order, skips directories and stripPattern matches, and records SHA1-Digest / SHA-256-Digest values according to the hash bitmask.
  - writeSignatureFile(Manifest, OutputStream, int hash) writes the CERT.SF content: a digest of the whole manifest plus a digest per manifest stanza, with the extra CRLF workaround when the .SF length is a multiple of 1024 bytes (a java.util.jar bug on Android platforms up to 1.6), tracked through the small CountOutputStream FilterOutputStream that is carried over from the old code.
  - writeSignatureBlock(CMSTypedData, X509Certificate, PrivateKey, OutputStream) builds the CMS signature with JcaContentSignerBuilder / JcaSignerInfoGeneratorBuilder on the Spongy Castle provider and writes it out as DER.
  - signFile (which writes the MANIFEST.MF entry, the CERT.SF entry via writeSignatureFile, and the CERT.%s signature block via writeSignatureBlock, all with the certificate-derived timestamp) now also receives the input jar at its call site: signFile(manifest, inputJar, publicKey, privateKey, outputJar).
  - copyFiles gains an alignment parameter: copyFiles(Manifest, JarFile in, JarOutputStream out, long timestamp, int alignment) copies STORED entries first, padding each entry's extra field so its data begins on a 4-byte boundary (JarFile.LOCHDR plus the name length, plus the 4 forced JAR_MAGIC bytes on the first entry), then copies the remaining DEFLATED entries with recomputed sizes; every output entry gets the fixed timestamp so incremental OTAs vary less.

Removed on the old side in the same region: the addOtacert helper, which copied the public key into META-INF/com/android/otacert for OTA packages.
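A worked example of the 4-byte alignment arithmetic described above, with made-up entry names and sizes; 30 is the size of a zip local file header (JarFile.LOCHDR), and the first entry carries 4 extra bytes of JAR magic that JarOutputStream always emits.

    public class AlignmentDemo {
        public static void main(String[] args) {
            final int LOCHDR = 30;          // zip local file header size (JarFile.LOCHDR)
            final int alignment = 4;
            boolean firstEntry = true;
            long offset = 0;
            String[] names = {"assets/public.certificate.x509.pem", "assets/private.key.pk8"};
            long[] sizes = {1200, 1217};    // illustrative STORED entry sizes
            for (int i = 0; i < names.length; i++) {
                offset += LOCHDR + names[i].length();
                if (firstEntry) {
                    // JarOutputStream prepends a 4-byte JAR_MAGIC extra field
                    // to the very first entry; account for it once.
                    offset += 4;
                    firstEntry = false;
                }
                long needed = (alignment - (offset % alignment)) % alignment;
                System.out.println(names[i] + ": pad extra field with " + needed + " byte(s)");
                offset += needed + sizes[i];   // entry data follows the padded header
            }
        }
    }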
The old whole-file signing plumbing is removed: GeneralUtils.WholeFileSignerOutputStream (the tee stream that fed the CMS signer while capturing the jar footer), and the entire FileUtils nested class — its JarFile-based getDigestAlgorithm, its addDigestsToManifest variant (the one that also stripped stale *-Digest attributes with an Iterator), its four-argument copyFiles, its signWholeFile(JarFile, InputStream, X509Certificate, PrivateKey, OutputStream) and the CMSSigner that re-serialized the jar through the tee stream.

In their place the new side signs the file content directly through a small CMSTypedData wrapper that re-reads the file from disk, leaving out the final two bytes (the zip comment-length field that signWholeFile rewrites):
+     // This class is to provide a file's content, but trimming out the last two bytes
+     // Used for signWholeFile
+     private static class CMSProcessableFile implements CMSTypedData {
+         private File file;
+         private ASN1ObjectIdentifier type;
+         private byte[] buffer;
+         int bufferSize = 0;
+
+         CMSProcessableFile(File file) {
+             this.file = file;
+             type = new ASN1ObjectIdentifier(CMSObjectIdentifiers.data.getId());
+             buffer = new byte[4096];
+         }
+
+         @Override
+         public ASN1ObjectIdentifier getContentType() {
+             return type;
+         }
+
+         @Override
+         public void write(OutputStream out) throws IOException, CMSException {
+             FileInputStream input = new FileInputStream(file);
+             long len = file.length() - 2;
+             while ((bufferSize = input.read(buffer)) > 0) {
+                 if (len <= bufferSize) {
+                     out.write(buffer, 0, (int) len);
+                     break;
+                 } else {
+                     out.write(buffer, 0, bufferSize);
+                 }
+                 len -= bufferSize;
+             }
+         }
+
+         @Override
+         public Object getContent() {
+             return file;
+         }
+
+         byte[] getTail() {
+             return Arrays.copyOfRange(buffer, 0, bufferSize);
+         }
+     }
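Why the last two bytes: for an archive with no comment, the file ends with the 22-byte end-of-central-directory (EOCD) record, whose final two bytes are the comment length that signWholeFile() later rewrites. A small self-contained check using only java.util.zip (no Magisk code):

    import java.io.ByteArrayOutputStream;
    import java.util.zip.ZipEntry;
    import java.util.zip.ZipOutputStream;

    public class EocdDemo {
        public static void main(String[] args) throws Exception {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            try (ZipOutputStream zip = new ZipOutputStream(bytes)) {
                zip.putNextEntry(new ZipEntry("hello.txt"));
                zip.write("hi".getBytes("UTF-8"));
            }
            byte[] b = bytes.toByteArray();
            int n = b.length;
            // EOCD signature 0x50 0x4b 0x05 0x06 sits 22 bytes from the end...
            System.out.printf("EOCD magic: %02x %02x %02x %02x%n",
                    b[n - 22] & 0xff, b[n - 21] & 0xff, b[n - 20] & 0xff, b[n - 19] & 0xff);
            // ...and the last two bytes are the (empty) comment length field.
            System.out.printf("comment length field: %02x %02x%n", b[n - 2] & 0xff, b[n - 1] & 0xff);
        }
    }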
// This class host functions that consumes inputstreams
private
static
void
signWholeFile
(
File
input
,
X509Certificate
publicKey
,
// Uses JarMap (virtual random access JarFile in memory)
PrivateKey
privateKey
,
OutputStream
outputStream
)
public
static
class
StreamUtils
{
throws
Exception
{
ByteArrayOutputStream
temp
=
new
ByteArrayOutputStream
();
/**
// put a readable message and a null char at the start of the
* Add the hash(es) of every file to the manifest, creating it if
// archive comment, so that tools that display the comment
* necessary.
// (hopefully) show something sensible.
*/
// TODO: anything more useful we can put in this message?
private
static
Manifest
addDigestsToManifest
(
JarMap
jar
,
int
hashes
)
byte
[]
message
=
"signed by SignApk"
.
getBytes
(
"UTF-8"
);
throws
IOException
,
GeneralSecurityException
{
temp
.
write
(
message
);
Manifest
input
=
jar
.
getManifest
();
temp
.
write
(
0
);
Manifest
output
=
new
Manifest
();
Attributes
main
=
output
.
getMainAttributes
();
CMSProcessableFile
cmsFile
=
new
CMSProcessableFile
(
input
);
if
(
input
!=
null
)
{
writeSignatureBlock
(
cmsFile
,
publicKey
,
privateKey
,
temp
);
main
.
putAll
(
input
.
getMainAttributes
());
}
else
{
// For a zip with no archive comment, the
main
.
putValue
(
"Manifest-Version"
,
"1.0"
);
// end-of-central-directory record will be 22 bytes long, so
main
.
putValue
(
"Created-By"
,
"1.0 (Android SignApk)"
);
// we expect to find the EOCD marker 22 bytes from the end.
}
byte
[]
zipData
=
cmsFile
.
getTail
();
MessageDigest
md_sha1
=
null
;
if
(
zipData
[
zipData
.
length
-
22
]
!=
0x50
||
MessageDigest
md_sha256
=
null
;
zipData
[
zipData
.
length
-
21
]
!=
0x4b
||
if
((
hashes
&
USE_SHA1
)
!=
0
)
{
zipData
[
zipData
.
length
-
20
]
!=
0x05
||
md_sha1
=
MessageDigest
.
getInstance
(
"SHA1"
);
zipData
[
zipData
.
length
-
19
]
!=
0x06
)
{
}
throw
new
IllegalArgumentException
(
"zip data already has an archive comment"
);
if
((
hashes
&
USE_SHA256
)
!=
0
)
{
md_sha256
=
MessageDigest
.
getInstance
(
"SHA256"
);
}
// We sort the input entries by name, and add them to the
// output manifest in sorted order. We expect that the output
// map will be deterministic.
/* JarMap is a TreeMap, so it's already sorted */
for
(
String
name
:
jar
.
keySet
())
{
JarEntry
entry
=
jar
.
getJarEntry
(
name
);
if
(!
entry
.
isDirectory
()
&&
(
stripPattern
==
null
||
!
stripPattern
.
matcher
(
name
).
matches
()))
{
byte
[]
buffer
=
jar
.
getStream
(
name
).
toByteArray
();
if
(
md_sha1
!=
null
)
md_sha1
.
update
(
buffer
,
0
,
buffer
.
length
);
if
(
md_sha256
!=
null
)
md_sha256
.
update
(
buffer
,
0
,
buffer
.
length
);
Attributes
attr
=
null
;
if
(
input
!=
null
)
attr
=
input
.
getAttributes
(
name
);
attr
=
attr
!=
null
?
new
Attributes
(
attr
)
:
new
Attributes
();
if
(
md_sha1
!=
null
)
{
attr
.
putValue
(
"SHA1-Digest"
,
new
String
(
Base64
.
encode
(
md_sha1
.
digest
()),
"ASCII"
));
}
if
(
md_sha256
!=
null
)
{
attr
.
putValue
(
"SHA-256-Digest"
,
new
String
(
Base64
.
encode
(
md_sha256
.
digest
()),
"ASCII"
));
}
output
.
getEntries
().
put
(
name
,
attr
);
}
}
return
output
;
}
/**
* Copy all the files in a manifest from input to output. We set
* the modification times in the output to a fixed time, so as to
* reduce variation in the output file and make incremental OTAs
* more efficient.
*/
private
static
void
copyFiles
(
Manifest
manifest
,
JarMap
in
,
JarOutputStream
out
,
long
timestamp
)
throws
IOException
{
Map
<
String
,
Attributes
>
entries
=
manifest
.
getEntries
();
ArrayList
<
String
>
names
=
new
ArrayList
<>(
entries
.
keySet
());
Collections
.
sort
(
names
);
for
(
String
name
:
names
)
{
JarEntry
inEntry
=
in
.
getJarEntry
(
name
);
JarEntry
outEntry
;
if
(
inEntry
.
getMethod
()
==
JarEntry
.
STORED
)
{
// Preserve the STORED method of the input entry.
outEntry
=
new
JarEntry
(
inEntry
);
}
else
{
// Create a new entry so that the compressed len is recomputed.
outEntry
=
new
JarEntry
(
name
);
}
outEntry
.
setTime
(
timestamp
);
out
.
putNextEntry
(
outEntry
);
in
.
getStream
(
name
).
writeTo
(
out
);
out
.
flush
();
}
}
private static void signWholeFile(JarMap inputJar, InputStream publicKeyFile,
                                  X509Certificate publicKey, PrivateKey privateKey,
                                  OutputStream outputStream) throws Exception {
    CMSSigner cmsOut = new CMSSigner(inputJar, publicKeyFile, publicKey, privateKey, outputStream);

    ByteArrayOutputStream temp = new ByteArrayOutputStream();

    // put a readable message and a null char at the start of the
    // archive comment, so that tools that display the comment
    // (hopefully) show something sensible.
    // TODO: anything more useful we can put in this message?
    byte[] message = "signed by SignApk".getBytes("UTF-8");
    temp.write(message);
    temp.write(0);

    cmsOut.writeSignatureBlock(temp);

    byte[] zipData = cmsOut.getSigner().getTail();

    // For a zip with no archive comment, the
    // end-of-central-directory record will be 22 bytes long, so
    // we expect to find the EOCD marker 22 bytes from the end.
    if (zipData[zipData.length - 22] != 0x50 ||
        zipData[zipData.length - 21] != 0x4b ||
        zipData[zipData.length - 20] != 0x05 ||
        zipData[zipData.length - 19] != 0x06) {
        throw new IllegalArgumentException("zip data already has an archive comment");
    }

    int total_size = temp.size() + 6;
    if (total_size > 0xffff) {
        throw new IllegalArgumentException("signature is too big for ZIP file comment");
    }
    // signature starts this many bytes from the end of the file
    int signature_start = total_size - message.length - 1;
    temp.write(signature_start & 0xff);
    temp.write((signature_start >> 8) & 0xff);
    // Why the 0xff bytes? In a zip file with no archive comment,
    // bytes [-6:-2] of the file are the little-endian offset from
    // the start of the file to the central directory. So for the
    // two high bytes to be 0xff 0xff, the archive would have to
    // be nearly 4GB in size. So it's unlikely that a real
    // commentless archive would have 0xffs here, and lets us tell
    // an old signed archive from a new one.
    temp.write(0xff);
    temp.write(0xff);
    temp.write(total_size & 0xff);
    temp.write((total_size >> 8) & 0xff);
    temp.flush();

    // Signature verification checks that the EOCD header is the
    // last such sequence in the file (to avoid minzip finding a
    // fake EOCD appended after the signature in its scan). The
    // odds of producing this sequence by chance are very low, but
    // let's catch it here if it does.
    byte[] b = temp.toByteArray();
    for (int i = 0; i < b.length - 3; ++i) {
        if (b[i] == 0x50 && b[i + 1] == 0x4b && b[i + 2] == 0x05 && b[i + 3] == 0x06) {
            throw new IllegalArgumentException("found spurious EOCD header at " + i);
        }
    }

    outputStream.write(total_size & 0xff);
    outputStream.write((total_size >> 8) & 0xff);
    temp.writeTo(outputStream);
}
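// Not part of this commit: a rough sketch of how a verifier could locate the block that
// signWholeFile() embeds, following the footer layout described in the comments above.
// The last six bytes of the file are [signature_start lo, hi, 0xff, 0xff, comment_size lo, hi],
// and the CMS signature block starts signature_start bytes from the end of the file.
private static byte[] readSignatureFooter(String path) throws Exception {
    try (java.io.RandomAccessFile f = new java.io.RandomAccessFile(path, "r")) {
        long len = f.length();
        byte[] footer = new byte[6];
        f.seek(len - 6);
        f.readFully(footer);
        if ((footer[2] & 0xff) != 0xff || (footer[3] & 0xff) != 0xff)
            throw new IllegalArgumentException("no whole-file signature footer");
        int signatureStart = (footer[0] & 0xff) | ((footer[1] & 0xff) << 8);
        byte[] sig = new byte[signatureStart - 6];    // CMS block, excluding the 6-byte footer itself
        f.seek(len - signatureStart);
        f.readFully(sig);
        return sig;
    }
}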
public static class JarMap extends TreeMap<String, Pair<JarEntry, ByteArrayOutputStream>> {

    private Manifest manifest;

    public JarMap(JarInputStream in) throws IOException {
        super();
        manifest = in.getManifest();
        byte[] buffer = new byte[4096];
        int num;
        JarEntry entry;
        while ((entry = in.getNextJarEntry()) != null) {
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            while ((num = in.read(buffer)) > 0) {
                stream.write(buffer, 0, num);
            }
            put(entry.getName(), entry, stream);
        }
        in.close();
    }

    public JarEntry getJarEntry(String name) {
        return get(name).first;
    }

    public ByteArrayOutputStream getStream(String name) {
        return get(name).second;
    }

    public void put(String name, JarEntry entry, ByteArrayOutputStream stream) {
        put(name, new Pair<>(entry, stream));
    }

    public Manifest getManifest() {
        return manifest;
    }

    public Enumeration<JarEntry> entries() {
        Iterator<Map.Entry<String, Pair<JarEntry, ByteArrayOutputStream>>> i = entrySet().iterator();
        ArrayList<JarEntry> list = new ArrayList<>();
        while (i.hasNext())
            list.add(i.next().getValue().first);
        return Collections.enumeration(list);
    }
}
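// Not part of this commit: hypothetical use of the JarMap helper above. It slurps the whole
// zip into memory once, after which entries can be enumerated and re-read any number of
// times. The zip file name is made up for the example.
private static void jarMapDemo() throws Exception {
    JarMap map = new JarMap(new JarInputStream(new FileInputStream("module.zip")));
    for (Enumeration<JarEntry> e = map.entries(); e.hasMoreElements(); ) {
        JarEntry entry = e.nextElement();
        System.out.println(entry.getName() + ": " + map.getStream(entry.getName()).size() + " bytes");
    }
}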
private static class CMSSigner implements CMSTypedData {

    private JarMap inputJar;
    private InputStream publicKeyFile;
    private X509Certificate publicKey;
    private PrivateKey privateKey;
    private OutputStream outputStream;
    private final ASN1ObjectIdentifier type;
    private GeneralUtils.WholeFileSignerOutputStream signer;

    public CMSSigner(JarMap inputJar, InputStream publicKeyFile,
                     X509Certificate publicKey, PrivateKey privateKey,
                     OutputStream outputStream) {
        this.inputJar = inputJar;
        this.publicKeyFile = publicKeyFile;
        this.publicKey = publicKey;
        this.privateKey = privateKey;
        this.outputStream = outputStream;
        this.type = new ASN1ObjectIdentifier(CMSObjectIdentifiers.data.getId());
    }

    public Object getContent() {
        // Not supported, but still don't crash or return null
        return 1;
    }

    public ASN1ObjectIdentifier getContentType() {
        return type;
    }

    public void write(OutputStream out) throws IOException {
        try {
            signer = new GeneralUtils.WholeFileSignerOutputStream(out, outputStream);
            JarOutputStream outputJar = new JarOutputStream(signer);

            int hash = FileUtils.getDigestAlgorithm(publicKey);

            // Assume the certificate is valid for at least an hour.
            long timestamp = publicKey.getNotBefore().getTime() + 3600L * 1000;

            Manifest manifest = addDigestsToManifest(inputJar, hash);
            copyFiles(manifest, inputJar, outputJar, timestamp);
            // Don't add Otacert, it's not an OTA
            // addOtacert(outputJar, publicKeyFile, timestamp, manifest, hash);
            GeneralUtils.signFile(manifest, publicKey, privateKey, outputJar);

            signer.notifyClosing();
            outputJar.close();
            signer.finish();
        } catch (Exception e) {
            throw new IOException(e);
        }
    }

    public void writeSignatureBlock(ByteArrayOutputStream temp)
            throws IOException, CertificateEncodingException, OperatorCreationException, CMSException {
        GeneralUtils.writeSignatureBlock(this, publicKey, privateKey, temp);
    }

    public GeneralUtils.WholeFileSignerOutputStream getSigner() {
        return signer;
    }
}
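// Not part of this commit: an illustrative driver for the private signWholeFile() above, as
// it might be called from inside ZipUtils. The keystore path, alias and password are invented
// placeholders; the real entry point that wires this up lives elsewhere in this class.
private static void signWholeFileDemo() throws Exception {
    java.security.KeyStore ks = java.security.KeyStore.getInstance("PKCS12");
    try (InputStream in = new FileInputStream("/path/to/signing.p12")) {
        ks.load(in, "password".toCharArray());
    }
    X509Certificate cert = (X509Certificate) ks.getCertificate("key0");
    PrivateKey key = (PrivateKey) ks.getKey("key0", "password".toCharArray());

    JarMap jar = new JarMap(new JarInputStream(new FileInputStream("unsigned.zip")));
    try (OutputStream out = new FileOutputStream("signed.zip")) {
        // publicKeyFile can be null here because the otacert step is skipped above.
        signWholeFile(jar, null, cert, key, out);
    }
}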
private static void signFile(Manifest manifest, JarFile inputJar,
                             X509Certificate publicKey, PrivateKey privateKey,
                             JarOutputStream outputJar) throws Exception {
    // Assume the certificate is valid for at least an hour.
    long timestamp = publicKey.getNotBefore().getTime() + 3600L * 1000;

    // MANIFEST.MF
    JarEntry je = new JarEntry(JarFile.MANIFEST_NAME);
    je.setTime(timestamp);
    outputJar.putNextEntry(je);
    manifest.write(outputJar);

    je = new JarEntry(CERT_SF_NAME);
    je.setTime(timestamp);
    outputJar.putNextEntry(je);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    writeSignatureFile(manifest, baos, getDigestAlgorithm(publicKey));
    byte[] signedData = baos.toByteArray();
    outputJar.write(signedData);

    // CERT.{EC,RSA} / CERT#.{EC,RSA}
    final String keyType = publicKey.getPublicKey().getAlgorithm();
    je = new JarEntry(String.format(CERT_SIG_NAME, keyType));
    je.setTime(timestamp);
    outputJar.putNextEntry(je);
    writeSignatureBlock(new CMSProcessableByteArray(signedData), publicKey, privateKey, outputJar);
}
}
\ No newline at end of file
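For completeness (not something this commit adds), the standard per-entry signature that signFile() produces can be sanity-checked with the stock java.util.jar verifier: reading every entry from a verifying JarFile throws a SecurityException on any digest mismatch. The file name below is a placeholder.

import java.io.InputStream;
import java.util.Enumeration;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;

public class VerifyEntriesDemo {
    public static void main(String[] args) throws Exception {
        try (JarFile jar = new JarFile("signed.zip", true)) {      // true => verify while reading
            Enumeration<JarEntry> e = jar.entries();
            byte[] buf = new byte[4096];
            while (e.hasMoreElements()) {
                try (InputStream in = jar.getInputStream(e.nextElement())) {
                    while (in.read(buf) != -1) { /* reading to EOF triggers digest verification */ }
                }
            }
            System.out.println("per-entry digests OK");
        }
    }
}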
app/src/main/jni/jni_glue.c View file @ fd4aaab1
...
@@ -3,58 +3,17 @@
//
#include <jni.h>
#include "zipadjust.h"

/* Removed by this commit (together with the <stdlib.h>, <unistd.h> and <fcntl.h> includes
   they needed): the old entry points, which adjusted a zip either in an in-memory buffer
   (through the fin/fout/insize/outsize globals) or in place on a single file. */
JNIEXPORT jbyteArray JNICALL
Java_com_topjohnwu_magisk_utils_ZipUtils_zipAdjust___3BI(JNIEnv *env, jclass type, jbyteArray jbytes, jint size) {
    fin = (*env)->GetPrimitiveArrayCritical(env, jbytes, NULL);
    insize = (size_t) size;
    zipadjust(0);
    (*env)->ReleasePrimitiveArrayCritical(env, jbytes, fin, 0);
    jbyteArray ret = (*env)->NewByteArray(env, outsize);
    (*env)->SetByteArrayRegion(env, ret, 0, outsize, (const jbyte *) fout);
    free(fout);
    return ret;
}

JNIEXPORT void JNICALL
Java_com_topjohnwu_magisk_utils_ZipUtils_zipAdjust__Ljava_lang_String_2(JNIEnv *env, jclass type, jstring name) {
    const char *filename = (*env)->GetStringUTFChars(env, name, NULL);
    int fd = open(filename, O_RDONLY);
    if (fd < 0)
        return;
    // Load the file to memory
    insize = lseek(fd, 0, SEEK_END);
    lseek(fd, 0, SEEK_SET);
    fin = malloc(insize);
    read(fd, fin, insize);
    zipadjust(0);
    close(fd);
    // Open file for output
    fd = open(filename, O_WRONLY | O_TRUNC);
    if (fd < 0)
        return;
    (*env)->ReleaseStringUTFChars(env, name, filename);
    // Write back to file
    lseek(fd, 0, SEEK_SET);
    write(fd, fout, outsize);
    close(fd);
    free(fin);
    free(fout);
}

/* Added by this commit: a single entry point that reads one path and writes the adjusted
   zip to another path. */
JNIEXPORT void JNICALL
Java_com_topjohnwu_magisk_utils_ZipUtils_zipAdjust(JNIEnv *env, jclass type, jstring filenameIn_, jstring filenameOut_) {
    const char *filenameIn = (*env)->GetStringUTFChars(env, filenameIn_, 0);
    const char *filenameOut = (*env)->GetStringUTFChars(env, filenameOut_, 0);

    // TODO
    zipadjust(filenameIn, filenameOut, 0);

    (*env)->ReleaseStringUTFChars(env, filenameIn_, filenameIn);
    (*env)->ReleaseStringUTFChars(env, filenameOut_, filenameOut);
}
\ No newline at end of file
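The new JNI symbol above implies a matching native declaration on the Java side. A sketch of the shape of that declaration (the real one lives in ZipUtils.java, which is not fully visible in this diff, and the library name below is an assumption):

package com.topjohnwu.magisk.utils;

public class ZipUtils {
    static {
        System.loadLibrary("zipadjust");   // assumed native library name
    }
    public static native void zipAdjust(String filenameIn, String filenameOut);
}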
app/src/main/jni/zipadjust.c View file @ fd4aaab1
#include <stdlib.h>
/* removed: */
#include <string.h>
/* added: */
#include <fcntl.h>
#include <unistd.h>
/* unchanged: */
#include <zlib.h>
#include "zipadjust.h"

/* removed: the in-memory buffer state */
size_t insize = 0, outsize = 0, alloc = 0;
unsigned char *fin = NULL, *fout = NULL;

/* added: */
#ifndef O_BINARY
#define O_BINARY 0
#define O_TEXT 0
#endif

#pragma pack(1)
struct local_header_struct {
...
@@ -81,41 +86,43 @@ static int xerror(char* message) {
...
@@ -81,41 +86,43 @@ static int xerror(char* message) {
return
0
;
return
0
;
}
}
/* removed: in-memory read */
static int xseekread(off_t offset, void *buf, size_t bytes) {
    memcpy(buf, fin + offset, bytes);
    return 1;
}

/* added: fd-based read */
static int xseekread(int fd, off_t offset, void *buf, size_t bytes) {
    if (lseek(fd, offset, SEEK_SET) == (off_t) -1) return xerror("Seek failed");
    if (read(fd, buf, bytes) != bytes) return xerror("Read failed");
    return 1;
}
/* removed: write into the growable in-memory output buffer */
static int xseekwrite(off_t offset, const void *buf, size_t bytes) {
    if (offset + bytes > outsize) outsize = offset + bytes;
    if (outsize > alloc) {
        fout = realloc(fout, outsize);
        alloc = outsize;
    }
    memcpy(fout + offset, buf, bytes);
    return 1;
}

/* added: fd-based write */
static int xseekwrite(int fd, off_t offset, void *buf, size_t bytes) {
    if (lseek(fd, offset, SEEK_SET) == (off_t) -1) return xerror("Seek failed");
    if (write(fd, buf, bytes) != bytes) return xerror("Write failed");
    return 1;
}
/* removed: copy between the in-memory buffers */
static int xfilecopy(off_t offsetIn, off_t offsetOut, size_t bytes) {
    unsigned int CHUNK = 256 * 1024;
    unsigned char *buf = malloc(CHUNK);
    if (buf == NULL) return xerror("malloc failed");
    size_t left = bytes;
    while (left > 0) {
        size_t wanted = (left < CHUNK) ? left : CHUNK;
        xseekread(offsetIn, buf, wanted);
        xseekwrite(offsetOut, buf, wanted);
        offsetIn += wanted;
        offsetOut += wanted;
        left -= wanted;
    }
    free(buf);
    return 1;
}

/* added: chunked copy between file descriptors */
static int xfilecopy(int fdIn, int fdOut, off_t offsetIn, off_t offsetOut, size_t bytes) {
    if ((offsetIn != (off_t) -1) && (lseek(fdIn, offsetIn, SEEK_SET) == (off_t) -1)) return xerror("Seek failed");
    if ((offsetOut != (off_t) -1) && (lseek(fdOut, offsetOut, SEEK_SET) == (off_t) -1)) return xerror("Seek failed");
    int CHUNK = 256 * 1024;
    void *buf = malloc(CHUNK);
    if (buf == NULL) return xerror("malloc failed");
    size_t left = bytes;
    while (left > 0) {
        size_t wanted = (left < CHUNK) ? left : CHUNK;
        size_t r = read(fdIn, buf, wanted);
        if (r <= 0) return xerror("Read failed");
        if (write(fdOut, buf, r) != r) return xerror("Write failed");
        left -= r;
    }
    free(buf);
    return 1;
}
/* removed: */
static int xdecompress(off_t offsetIn, off_t offsetOut, size_t bytes) {
    unsigned int CHUNK = 256 * 1024;
/* added: */
static int xdecompress(int fdIn, int fdOut, off_t offsetIn, off_t offsetOut, size_t bytes) {
    if ((offsetIn != (off_t) -1) && (lseek(fdIn, offsetIn, SEEK_SET) == (off_t) -1)) return xerror("Seek failed");
    if ((offsetOut != (off_t) -1) && (lseek(fdOut, offsetOut, SEEK_SET) == (off_t) -1)) return xerror("Seek failed");
    int CHUNK = 256 * 1024;
/* unchanged: */
    int ret;
    unsigned have;
...
@@ -132,12 +139,9 @@ static int xdecompress(off_t offsetIn, off_t offsetOut, size_t bytes) {
    if (ret != Z_OK) return xerror("ret != Z_OK");
    do {
        /* removed: pull input from the in-memory buffer */
        strm.avail_in = insize - offsetIn;
        if (strm.avail_in == 0) break;
        strm.avail_in = (strm.avail_in > CHUNK) ? CHUNK : strm.avail_in;
        xseekread(offsetIn, in, strm.avail_in);
        strm.next_in = in;
        offsetIn += strm.avail_in;
        /* added: read straight from the input fd */
        strm.avail_in = read(fdIn, in, CHUNK);
        if (strm.avail_in == 0) break;
        strm.next_in = in;
        do {
            strm.avail_out = CHUNK;
...
@@ -155,8 +159,10 @@ static int xdecompress(off_t offsetIn, off_t offsetOut, size_t bytes) {
have
=
CHUNK
-
strm
.
avail_out
;
have
=
CHUNK
-
strm
.
avail_out
;
xseekwrite
(
offsetOut
,
out
,
have
);
if
(
write
(
fdOut
,
out
,
have
)
!=
have
)
{
offsetOut
+=
have
;
(
void
)
inflateEnd
(
&
strm
);
return
xerror
(
"Write failed"
);
}
}
while
(
strm
.
avail_out
==
0
);
}
while
(
strm
.
avail_out
==
0
);
}
while
(
ret
!=
Z_STREAM_END
);
}
while
(
ret
!=
Z_STREAM_END
);
(
void
)
inflateEnd
(
&
strm
);
(
void
)
inflateEnd
(
&
strm
);
...
@@ -164,118 +170,128 @@ static int xdecompress(off_t offsetIn, off_t offsetOut, size_t bytes) {
...
@@ -164,118 +170,128 @@ static int xdecompress(off_t offsetIn, off_t offsetOut, size_t bytes) {
return
ret
==
Z_STREAM_END
?
1
:
0
;
return
ret
==
Z_STREAM_END
?
1
:
0
;
}
}
/* removed by this commit: the previous zipadjust() worked on the in-memory buffers
   (fin/insize for input, fout/outsize/alloc for output) instead of file descriptors.
   Its per-entry rewrite loop is identical to the one in the new version below, except
   that xseekread/xseekwrite/xdecompress/xfilecopy are called without fd arguments. */
int zipadjust(int decompress) {
    int ok = 0;
    char filename[1024];

    central_footer_t central_footer;
    uint32_t central_directory_in_position = 0;
    uint32_t central_directory_in_size = 0;
    uint32_t central_directory_out_size = 0;

    int i;
    for (i = insize - 4; i >= 0; i--) {
        uint32_t magic = 0;
        if (!xseekread(i, &magic, sizeof(uint32_t))) return 0;
        if (magic == MAGIC_CENTRAL_FOOTER) {
            LOGD("central footer @ %08X\n", i);
            if (!xseekread(i, &central_footer, sizeof(central_footer_t))) return 0;
            central_header_t central_header;
            if (!xseekread(central_footer.central_directory_offset, &central_header, sizeof(central_header_t))) return 0;
            if (central_header.signature == MAGIC_CENTRAL_HEADER) {
                central_directory_in_position = central_footer.central_directory_offset;
                central_directory_in_size = insize - central_footer.central_directory_offset;
                LOGD("central header @ %08X (%d)\n", central_footer.central_directory_offset, central_footer.central_directory_size);
                break;
            }
        }
    }

    if (central_directory_in_position == 0) return 0;

    unsigned char *central_directory_in = (unsigned char *) malloc(central_directory_in_size);
    unsigned char *central_directory_out = (unsigned char *) malloc(central_directory_in_size);
    if (!xseekread(central_directory_in_position, central_directory_in, central_directory_in_size)) return 0;
    memset(central_directory_out, 0, central_directory_in_size);
    fout = (unsigned char *) malloc(insize);
    alloc = insize;

    uintptr_t central_directory_in_index = 0;
    uintptr_t central_directory_out_index = 0;
    central_header_t *central_header = NULL;
    uint32_t out_index = 0;

    while (1) {
        /* ... per-entry rewrite loop, same as in the new version below but without fd arguments ... */
    }

    central_directory_out_size = central_directory_out_index;
    central_footer.central_directory_size = central_directory_out_size;
    central_footer.central_directory_offset = out_index;
    central_footer.length_comment = 0;
    if (!xseekwrite(out_index, central_directory_out, central_directory_out_size)) return 0;
    out_index += central_directory_out_size;
    if (!xseekwrite(out_index, &central_footer, sizeof(central_footer_t))) return 0;
    LOGD("central header @ %08X (%d)\n", central_footer.central_directory_offset, central_footer.central_directory_size);
    LOGD("central footer @ %08X\n", out_index);
    ok = 1;

    free(central_directory_in);
    free(central_directory_out);
    return ok;
}

/* added by this commit: zipadjust() now opens the input and output paths itself and
   streams everything through file descriptors. */
int zipadjust(const char *filenameIn, const char *filenameOut, int decompress) {
    int ok = 0;

    int fin = open(filenameIn, O_RDONLY | O_BINARY);
    if (fin > 0) {
        unsigned int size = lseek(fin, 0, SEEK_END);
        lseek(fin, 0, SEEK_SET);
        LOGD("%d bytes\n", size);

        char filename[1024];

        central_footer_t central_footer;
        uint32_t central_directory_in_position = 0;
        uint32_t central_directory_in_size = 0;
        uint32_t central_directory_out_size = 0;

        int i;
        for (i = size - 4; i >= 0; i--) {
            uint32_t magic = 0;
            if (!xseekread(fin, i, &magic, sizeof(uint32_t))) return 0;
            if (magic == MAGIC_CENTRAL_FOOTER) {
                LOGD("central footer @ %08X\n", i);
                if (!xseekread(fin, i, &central_footer, sizeof(central_footer_t))) return 0;

                central_header_t central_header;
                if (!xseekread(fin, central_footer.central_directory_offset, &central_header, sizeof(central_header_t))) return 0;
                if (central_header.signature == MAGIC_CENTRAL_HEADER) {
                    central_directory_in_position = central_footer.central_directory_offset;
                    central_directory_in_size = size - central_footer.central_directory_offset;
                    LOGD("central header @ %08X (%d)\n", central_footer.central_directory_offset, central_footer.central_directory_size);
                    break;
                }
            }
        }

        if (central_directory_in_position == 0) return 0;

        unsigned char *central_directory_in = (unsigned char *) malloc(central_directory_in_size);
        unsigned char *central_directory_out = (unsigned char *) malloc(central_directory_in_size);
        if (!xseekread(fin, central_directory_in_position, central_directory_in, central_directory_in_size)) return 0;
        memset(central_directory_out, 0, central_directory_in_size);

        unlink(filenameOut);
        int fout = open(filenameOut, O_CREAT | O_WRONLY | O_BINARY, 0644);
        if (fout > 0) {
            uintptr_t central_directory_in_index = 0;
            uintptr_t central_directory_out_index = 0;
            central_header_t *central_header = NULL;
            uint32_t out_index = 0;

            while (1) {
                central_header = (central_header_t *) &central_directory_in[central_directory_in_index];
                if (central_header->signature != MAGIC_CENTRAL_HEADER) break;

                filename[central_header->length_filename] = (char) 0;
                memcpy(filename, &central_directory_in[central_directory_in_index + sizeof(central_header_t)], central_header->length_filename);
                LOGD("%s (%d --> %d) [%08X] (%d)\n", filename, central_header->size_uncompressed, central_header->size_compressed, central_header->crc32, central_header->length_extra + central_header->length_comment);

                local_header_t local_header;
                if (!xseekread(fin, central_header->offset, &local_header, sizeof(local_header_t))) return 0;

                // save and update to next index before we clobber the data
                uint16_t compression_method_old = central_header->compression_method;
                uint32_t size_compressed_old = central_header->size_compressed;
                uint32_t offset_old = central_header->offset;
                uint32_t length_extra_old = central_header->length_extra;
                central_directory_in_index += sizeof(central_header_t) + central_header->length_filename + central_header->length_extra + central_header->length_comment;

                // copying, rewriting, and correcting local and central headers so all the information matches, and no data descriptors are necessary
                central_header->offset = out_index;
                central_header->flags = central_header->flags & !8;
                if (decompress && (compression_method_old == 8)) {
                    central_header->compression_method = 0;
                    central_header->size_compressed = central_header->size_uncompressed;
                }
                central_header->length_extra = 0;
                central_header->length_comment = 0;
                local_header.compression_method = central_header->compression_method;
                local_header.flags = central_header->flags;
                local_header.crc32 = central_header->crc32;
                local_header.size_uncompressed = central_header->size_uncompressed;
                local_header.size_compressed = central_header->size_compressed;
                local_header.length_extra = 0;

                if (!xseekwrite(fout, out_index, &local_header, sizeof(local_header_t))) return 0;
                out_index += sizeof(local_header_t);
                if (!xseekwrite(fout, out_index, &filename[0], central_header->length_filename)) return 0;
                out_index += central_header->length_filename;

                if (decompress && (compression_method_old == 8)) {
                    if (!xdecompress(fin, fout, offset_old + sizeof(local_header_t) + central_header->length_filename + length_extra_old, out_index, size_compressed_old)) return 0;
                } else {
                    if (!xfilecopy(fin, fout, offset_old + sizeof(local_header_t) + central_header->length_filename + length_extra_old, out_index, size_compressed_old)) return 0;
                }
                out_index += local_header.size_compressed;

                memcpy(&central_directory_out[central_directory_out_index], central_header, sizeof(central_header_t) + central_header->length_filename);
                central_directory_out_index += sizeof(central_header_t) + central_header->length_filename;
            }

            central_directory_out_size = central_directory_out_index;
            central_footer.central_directory_size = central_directory_out_size;
            central_footer.central_directory_offset = out_index;
            central_footer.length_comment = 0;
            if (!xseekwrite(fout, out_index, central_directory_out, central_directory_out_size)) return 0;
            out_index += central_directory_out_size;
            if (!xseekwrite(fout, out_index, &central_footer, sizeof(central_footer_t))) return 0;
            LOGD("central header @ %08X (%d)\n", central_footer.central_directory_offset, central_footer.central_directory_size);
            LOGD("central footer @ %08X\n", out_index);

            close(fout);
            ok = 1;
        }

        free(central_directory_in);
        free(central_directory_out);
        close(fin);
    }
    return ok;
}
\ No newline at end of file
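The scan zipadjust() performs (walk backwards from the end of the file looking for the end-of-central-directory magic PK\x05\x06, then read the central directory offset out of that record) is easier to see without the C plumbing. A rough Java transliteration, for illustration only:

public class EocdScan {
    // Returns the central directory offset recorded in the EOCD, or -1 if none is found.
    public static long findCentralDirectory(String path) throws Exception {
        try (java.io.RandomAccessFile f = new java.io.RandomAccessFile(path, "r")) {
            long len = f.length();
            byte[] magic = new byte[4];
            for (long i = len - 22; i >= 0; i--) {         // a comment-less EOCD is 22 bytes long
                f.seek(i);
                f.readFully(magic);
                if (magic[0] == 0x50 && magic[1] == 0x4b && magic[2] == 0x05 && magic[3] == 0x06) {
                    f.seek(i + 16);                        // little-endian u32: central directory offset
                    long b0 = f.read(), b1 = f.read(), b2 = f.read(), b3 = f.read();
                    return b0 | (b1 << 8) | (b2 << 16) | (b3 << 24);
                }
            }
            return -1;
        }
    }
}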
app/src/main/jni/zipadjust.h View file @ fd4aaab1
...
@@ -3,10 +3,7 @@
#include <android/log.h>

/* removed: */
int zipadjust(int decompress);
extern size_t insize, outsize, alloc;
extern unsigned char *fin, *fout;

/* added: */
int zipadjust(const char *filenameIn, const char *filenameOut, int decompress);

#define LOG_TAG "zipadjust"
...