diff --git a/app/build.gradle.kts b/app/build.gradle.kts index 75f17cf4..c59e18d2 100644 --- a/app/build.gradle.kts +++ b/app/build.gradle.kts @@ -67,8 +67,8 @@ android { applicationId = "net.opendasharchive.openarchive" minSdk = 29 targetSdk = 36 - versionCode = 30033 - versionName = "4.0.12" + versionCode = 30035 + versionName = "4.0.15" multiDexEnabled = true vectorDrawables.useSupportLibrary = true testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner" @@ -281,7 +281,6 @@ dependencies { implementation(libs.okhttp.logging) implementation(libs.retrofit) implementation(libs.retrofit.kotlinx.serialization) - implementation(libs.guardianproject.sardine) // Images & Media implementation(libs.coil) diff --git a/app/src/main/java/net/opendasharchive/openarchive/core/security/TinkVaultCredentialStore.kt b/app/src/main/java/net/opendasharchive/openarchive/core/security/TinkVaultCredentialStore.kt index 78111652..4668b017 100644 --- a/app/src/main/java/net/opendasharchive/openarchive/core/security/TinkVaultCredentialStore.kt +++ b/app/src/main/java/net/opendasharchive/openarchive/core/security/TinkVaultCredentialStore.kt @@ -25,6 +25,10 @@ import javax.crypto.spec.GCMParameterSpec // Replaces TinkVaultCredentialStore — same AES-256-GCM + Android Keystore, no Tink dependency. // Migration: if decryption fails (pre-existing Tink-encrypted data), the credential is cleared // and the user will be prompted to re-enter their server password on next connection. +// +// DataStore must be a process-wide singleton for a given file (Android requirement). The +// companion object holds the single instance so that the migration-time store (created before +// Koin) and the Koin-injected store share the same underlying DataStore and never conflict. 
class TinkVaultCredentialStore( context: Context, private val io: CoroutineDispatcher = Dispatchers.IO @@ -32,12 +36,8 @@ class TinkVaultCredentialStore( private val appContext = context.applicationContext - private val dataStore: DataStore by lazy { - PreferenceDataStoreFactory.create( - scope = CoroutineScope(SupervisorJob() + io), - produceFile = { appContext.preferencesDataStoreFile(DATASTORE_FILE_NAME) } - ) - } + private val dataStore: DataStore + get() = getOrCreateDataStore(appContext) private fun getOrCreateKey(): SecretKey { val ks = KeyStore.getInstance(ANDROID_KEYSTORE).apply { load(null) } @@ -94,12 +94,22 @@ class TinkVaultCredentialStore( private fun secretKey(vaultId: Long) = stringPreferencesKey("vault_secret_$vaultId") - private companion object { + companion object { const val DATASTORE_FILE_NAME = "vault_secure_credentials" const val KEY_ALIAS = "openarchive_vault_master_key" - const val ANDROID_KEYSTORE = "AndroidKeyStore" - const val TRANSFORMATION = "AES/GCM/NoPadding" - const val GCM_IV_LENGTH = 12 - const val GCM_TAG_BITS = 128 + private const val ANDROID_KEYSTORE = "AndroidKeyStore" + private const val TRANSFORMATION = "AES/GCM/NoPadding" + private const val GCM_IV_LENGTH = 12 + private const val GCM_TAG_BITS = 128 + + @Volatile private var sharedDataStore: DataStore? 
= null + + private fun getOrCreateDataStore(context: Context): DataStore = + sharedDataStore ?: synchronized(this) { + sharedDataStore ?: PreferenceDataStoreFactory.create( + scope = CoroutineScope(SupervisorJob() + Dispatchers.IO), + produceFile = { context.applicationContext.preferencesDataStoreFile(DATASTORE_FILE_NAME) } + ).also { sharedDataStore = it } + } } } diff --git a/app/src/main/java/net/opendasharchive/openarchive/db/SugarToRoomMigrator.kt b/app/src/main/java/net/opendasharchive/openarchive/db/SugarToRoomMigrator.kt new file mode 100644 index 00000000..2bcc66bc --- /dev/null +++ b/app/src/main/java/net/opendasharchive/openarchive/db/SugarToRoomMigrator.kt @@ -0,0 +1,180 @@ +package net.opendasharchive.openarchive.db + +import androidx.room3.RoomDatabase +import com.orm.SugarRecord +import net.opendasharchive.openarchive.core.domain.EvidenceStatus +import net.opendasharchive.openarchive.core.domain.VaultType +import net.opendasharchive.openarchive.core.logger.AppLogger +import net.opendasharchive.openarchive.core.security.VaultCredentialStore +import net.opendasharchive.openarchive.util.DateUtils +import net.opendasharchive.openarchive.util.toLocalDateTime +import net.opendasharchive.openarchive.db.sugar.Collection as SugarCollection +import net.opendasharchive.openarchive.db.sugar.Media as SugarMedia +import net.opendasharchive.openarchive.db.sugar.Project as SugarProject +import net.opendasharchive.openarchive.db.sugar.Space as SugarSpace + +/** + * Standalone synchronous migrator — no Koin dependency. + * + * Call this with a temporary [AppDatabase] instance opened directly via + * [androidx.room3.Room.databaseBuilder] BEFORE [startKoin] so that the Koin + * bindings can safely target Room from the very first launch. + */ +object SugarToRoomMigrator { + + /** + * Reads every Sugar ORM record and upserts it into the corresponding Room DAO. + * Throws on unrecoverable failure; callers should catch and log. 
+ */ + suspend fun migrate( + vaultDao: VaultDao, + archiveDao: ArchiveDao, + submissionDao: SubmissionDao, + evidenceDao: EvidenceDao, + migrationDao: MigrationDao, + credentialStore: VaultCredentialStore + ) { + migrateSpaces(vaultDao, migrationDao, credentialStore) + migrateProjects(archiveDao, migrationDao) + migrateCollections(submissionDao, migrationDao) + migrateMedia(evidenceDao, migrationDao) + migrationDao.upsert( + MigrationStateEntity(stage = "DONE", processedCount = 0, totalCount = 0, completedAt = DateUtils.now) + ) + } + + private suspend fun migrateSpaces( + vaultDao: VaultDao, + migrationDao: MigrationDao, + credentialStore: VaultCredentialStore + ) { + val spaces = try { + SugarSpace.getAll().asSequence().toList() + } catch (e: Exception) { + AppLogger.e("SugarToRoomMigrator: migrateSpaces — failed to read Sugar spaces, skipping", e) + migrationDao.upsert(MigrationStateEntity(stage = "PROJECTS", processedCount = 0, totalCount = 0)) + return + } + AppLogger.i("SugarToRoomMigrator: Migrating ${spaces.size} spaces") + + spaces.forEach { space -> + val vaultId = vaultDao.upsert( + VaultEntity( + id = space.id, + type = when (space.tType) { + SugarSpace.Type.WEBDAV -> VaultType.PRIVATE_SERVER + SugarSpace.Type.INTERNET_ARCHIVE -> VaultType.INTERNET_ARCHIVE + SugarSpace.Type.RAVEN -> VaultType.DWEB_STORAGE + }, + name = space.name, + username = space.username, + displayName = space.displayname, + host = space.host, + metaData = space.metaData, + licenseUrl = space.license, + createdAt = DateUtils.now.toLocalDateTime() + ) + ) + if (space.password.isNotBlank()) { + credentialStore.putSecret(vaultId, space.password) + } + } + migrationDao.upsert(MigrationStateEntity(stage = "PROJECTS", processedCount = 0, totalCount = 0)) + } + + private suspend fun migrateProjects(archiveDao: ArchiveDao, migrationDao: MigrationDao) { + val projects = try { + SugarRecord.findAll(SugarProject::class.java).asSequence().toList() + } catch (e: Exception) { + 
AppLogger.e("SugarToRoomMigrator: migrateProjects — failed to read Sugar projects, skipping", e) + migrationDao.upsert(MigrationStateEntity(stage = "COLLECTIONS", processedCount = 0, totalCount = 0)) + return + } + AppLogger.i("SugarToRoomMigrator: Migrating ${projects.size} projects") + + projects.forEach { project -> + archiveDao.upsert( + ArchiveEntity( + id = project.id, + description = project.description, + createdAt = project.created?.time?.toLocalDateTime(), + vaultId = project.spaceId ?: -1, + archived = project.isArchived, + openSubmissionId = project.openCollectionId, + licenseUrl = project.licenseUrl, + isRemote = false + ) + ) + } + migrationDao.upsert(MigrationStateEntity(stage = "COLLECTIONS", processedCount = 0, totalCount = 0)) + } + + private suspend fun migrateCollections(submissionDao: SubmissionDao, migrationDao: MigrationDao) { + val collections = try { + SugarRecord.findAll(SugarCollection::class.java).asSequence().toList() + } catch (e: Exception) { + AppLogger.e("SugarToRoomMigrator: migrateCollections — failed to read Sugar collections, skipping", e) + migrationDao.upsert(MigrationStateEntity(stage = "MEDIA", processedCount = 0, totalCount = 0)) + return + } + AppLogger.i("SugarToRoomMigrator: Migrating ${collections.size} collections") + + collections.forEach { collection -> + submissionDao.upsert( + SubmissionEntity( + id = collection.id, + archiveId = collection.projectId ?: -1, + uploadedAt = collection.uploadDate?.time?.toLocalDateTime(), + serverUrl = collection.serverUrl + ) + ) + } + migrationDao.upsert(MigrationStateEntity(stage = "MEDIA", processedCount = 0, totalCount = 0)) + } + + private suspend fun migrateMedia(evidenceDao: EvidenceDao, migrationDao: MigrationDao) { + val mediaList = try { + SugarRecord.findAll(SugarMedia::class.java).asSequence().toList() + } catch (e: Exception) { + AppLogger.e("SugarToRoomMigrator: migrateMedia — failed to read Sugar media, skipping", e) + return + } + AppLogger.i("SugarToRoomMigrator: 
Migrating ${mediaList.size} media items") + + mediaList.forEach { media -> + evidenceDao.upsert( + EvidenceEntity( + id = media.id, + originalFilePath = media.originalFilePath, + mimeType = media.mimeType, + createdAt = media.createDate?.time?.toLocalDateTime(), + updatedAt = media.updateDate?.time?.toLocalDateTime(), + uploadedAt = media.uploadDate?.time?.toLocalDateTime(), + serverUrl = media.serverUrl, + title = media.title, + description = media.description, + author = media.author, + location = media.location, + tags = media.tags, + licenseUrl = media.licenseUrl, + mediaHashString = media.mediaHashString, + status = when (media.sStatus) { + SugarMedia.Status.Local -> EvidenceStatus.LOCAL + SugarMedia.Status.Queued -> EvidenceStatus.QUEUED + SugarMedia.Status.Uploading -> EvidenceStatus.QUEUED + SugarMedia.Status.Uploaded -> EvidenceStatus.UPLOADED + SugarMedia.Status.Error -> EvidenceStatus.ERROR + else -> EvidenceStatus.NEW + }, + statusMessage = media.statusMessage, + archiveId = media.projectId, + submissionId = media.collectionId, + contentLength = media.contentLength, + progress = media.progress, + flag = media.flag, + priority = media.priority + ) + ) + } + } +} diff --git a/app/src/main/java/net/opendasharchive/openarchive/services/Conduit.kt b/app/src/main/java/net/opendasharchive/openarchive/services/Conduit.kt index 4e438603..d6a02cb6 100644 --- a/app/src/main/java/net/opendasharchive/openarchive/services/Conduit.kt +++ b/app/src/main/java/net/opendasharchive/openarchive/services/Conduit.kt @@ -7,9 +7,11 @@ import android.webkit.MimeTypeMap import android.net.Uri import kotlinx.coroutines.CoroutineScope import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.NonCancellable import kotlinx.coroutines.SupervisorJob import kotlinx.coroutines.cancel import kotlinx.coroutines.launch +import kotlinx.coroutines.withContext import net.opendasharchive.openarchive.R import net.opendasharchive.openarchive.analytics.api.AnalyticsEvent import 
net.opendasharchive.openarchive.analytics.api.AnalyticsManager @@ -199,7 +201,13 @@ abstract class Conduit( scope.cancel() } - suspend fun jobFailed(exception: Throwable) { + suspend fun jobFailed(exception: Throwable) = withContext(NonCancellable) { + // NonCancellable ensures DB writes and bus events always complete even if the + // parent serviceScope is being cancelled (e.g. onStopJob). Without it, + // suspension points inside jobFailed throw CancellationException, which propagates + // to the outer catch in upload() and calls jobFailed a second time — producing + // duplicate "Upload cancelled" log entries and leaving evidence in a bad state. + // TorNotReadyException is transient — re-queue silently so the item retries when Tor connects. if (exception is TorNotReadyException) { AppLogger.i("Tor not ready during upload, re-queuing item ${mEvidence.id}") @@ -213,7 +221,7 @@ abstract class Conduit( isUploaded = false ) scope.cancel() - return + return@withContext } // If an upload was cancelled, reset to QUEUED so it's retried on next session, @@ -247,7 +255,7 @@ abstract class Conduit( isUploaded = false ) scope.cancel() - return + return@withContext } mEvidence = mEvidence.copy( @@ -258,49 +266,49 @@ abstract class Conduit( AppLogger.e(exception) - // Track failed upload analytics (GDPR-compliant - no PII) - val vault = spaceRepository.getSpaceById(mEvidence.vaultId) - val backendType = vault?.type?.friendlyName ?: "Unknown" - val fileType = getFileType(mEvidence.mimeType) - val fileSizeKB = mEvidence.contentLength / 1024 - - // Categorize error - val errorCategory = when (exception) { - is IOException -> "network" - is FileNotFoundException -> "file_not_found" - is SecurityException -> "permission" - else -> "unknown" - } + // Track failed upload analytics (GDPR-compliant - no PII) + val vault = spaceRepository.getSpaceById(mEvidence.vaultId) + val backendType = vault?.type?.friendlyName ?: "Unknown" + val fileType = getFileType(mEvidence.mimeType) + val 
fileSizeKB = mEvidence.contentLength / 1024 + + // Categorize error + val errorCategory = when (exception) { + is IOException -> "network" + is FileNotFoundException -> "file_not_found" + is SecurityException -> "permission" + else -> "unknown" + } - analyticsManager.trackUploadFailed( - backendType = backendType, - fileType = fileType, - errorCategory = errorCategory, - fileSizeKB = fileSizeKB - ) + analyticsManager.trackUploadFailed( + backendType = backendType, + fileType = fileType, + errorCategory = errorCategory, + fileSizeKB = fileSizeKB + ) - // Track in session - sessionTracker.trackUploadFailed() + // Track in session + sessionTracker.trackUploadFailed() - // Track error for drop-off analysis - analyticsManager.trackError( - errorCategory = errorCategory, - screenName = "Upload", - backendType = backendType - ) + // Track error for drop-off analysis + analyticsManager.trackError( + errorCategory = errorCategory, + screenName = "Upload", + backendType = backendType + ) - BroadcastManager.postChange( - context = mContext, - collectionId = mEvidence.submissionId, - mediaId = mEvidence.id - ) - UploadEventBus.emitChanged( - projectId = mEvidence.archiveId, - collectionId = mEvidence.submissionId, - mediaId = mEvidence.id, - progress = -1, - isUploaded = false - ) + BroadcastManager.postChange( + context = mContext, + collectionId = mEvidence.submissionId, + mediaId = mEvidence.id + ) + UploadEventBus.emitChanged( + projectId = mEvidence.archiveId, + collectionId = mEvidence.submissionId, + mediaId = mEvidence.id, + progress = -1, + isUploaded = false + ) scope.cancel() } diff --git a/app/src/main/java/net/opendasharchive/openarchive/services/SaveClient.kt b/app/src/main/java/net/opendasharchive/openarchive/services/SaveClient.kt index c2eba15b..63a4f1df 100644 --- a/app/src/main/java/net/opendasharchive/openarchive/services/SaveClient.kt +++ b/app/src/main/java/net/opendasharchive/openarchive/services/SaveClient.kt @@ -1,14 +1,12 @@ package 
net.opendasharchive.openarchive.services import android.content.Context -import com.thegrizzlylabs.sardineandroid.impl.OkHttpSardine import net.opendasharchive.openarchive.services.tor.TorConstants import net.opendasharchive.openarchive.services.tor.TorServiceManager import net.opendasharchive.openarchive.services.common.auth.BasicAuthInterceptor import net.opendasharchive.openarchive.util.Prefs import okhttp3.OkHttpClient import okhttp3.Protocol -import okhttp3.ResponseBody.Companion.toResponseBody import org.koin.core.component.KoinComponent import org.koin.core.component.inject import java.net.Authenticator @@ -144,44 +142,4 @@ object SaveClient : KoinComponent { return builder.build() } - /** - * Creates a Sardine WebDAV client configured for the current settings. - * - * Credentials are injected as a preemptive BasicAuth interceptor on the OkHttpClient - * rather than via sardine's setCredentials(). setCredentials() rebuilds the client - * with hardcoded 30s timeouts, which kills large-file uploads. - * - * WebDAV uses forceCloseConnection=true and allowHttp2=false intentionally: - * - Connection: close prevents partial-upload corruption from stale keep-alive connections. - * - HTTP/1.1 avoids multiplexing conflicts with Nextcloud's chunked-upload temp-slot mechanism. - * - * @param context Application context - * @param user WebDAV username - * @param pass WebDAV password - * @return Configured OkHttpSardine instance - * @throws TorNotReadyException if Tor is enabled but not yet connected - */ - suspend fun getSardine(context: Context, user: String, pass: String): OkHttpSardine { - val client = get( - context = context, - user = user, - password = pass, - forceCloseConnection = true, - allowHttp2 = false - ).newBuilder() - // Sardine's execute() never closes response bodies (library bug — no try/finally). - // Buffer + close each response immediately so OkHttp can reclaim the connection. 
- // WebDAV response bodies are always small (XML/status); large data is always in - // request bodies (uploads), never in responses. - .addInterceptor { chain -> - val response = chain.proceed(chain.request()) - val body = response.body ?: return@addInterceptor response - val buffered = body.bytes() - response.newBuilder() - .body(buffered.toResponseBody(body.contentType())) - .build() - } - .build() - return OkHttpSardine(client) - } } diff --git a/app/src/main/java/net/opendasharchive/openarchive/services/webdav/data/WebDavConduit.kt b/app/src/main/java/net/opendasharchive/openarchive/services/webdav/data/WebDavConduit.kt index f443b7ef..4a43406c 100644 --- a/app/src/main/java/net/opendasharchive/openarchive/services/webdav/data/WebDavConduit.kt +++ b/app/src/main/java/net/opendasharchive/openarchive/services/webdav/data/WebDavConduit.kt @@ -1,20 +1,35 @@ package net.opendasharchive.openarchive.services.webdav.data import android.content.Context -import com.thegrizzlylabs.sardineandroid.SardineListener -import com.thegrizzlylabs.sardineandroid.impl.OkHttpSardine +import kotlinx.coroutines.Dispatchers +import kotlinx.coroutines.withContext import net.opendasharchive.openarchive.core.logger.AppLogger import net.opendasharchive.openarchive.core.domain.Evidence import net.opendasharchive.openarchive.services.Conduit import net.opendasharchive.openarchive.services.SaveClient +import net.opendasharchive.openarchive.services.common.network.RequestBodyUtil +import net.opendasharchive.openarchive.services.common.network.createListener +import okhttp3.Call import okhttp3.HttpUrl +import okhttp3.MediaType.Companion.toMediaTypeOrNull +import okhttp3.OkHttpClient +import okhttp3.Request +import okhttp3.RequestBody.Companion.toRequestBody import java.io.FileNotFoundException import java.io.IOException class WebDavConduit(evidence: Evidence, context: Context) : Conduit(evidence, context) { - private lateinit var mClient: OkHttpSardine + private lateinit var mClient: 
OkHttpClient + + @Volatile + private var currentCall: Call? = null + + override fun cancel() { + super.cancel() + currentCall?.cancel() + } override suspend fun upload(): Boolean { try { @@ -23,7 +38,16 @@ class WebDavConduit(evidence: Evidence, context: Context) : Conduit(evidence, co val base = vault.hostUrl ?: return false val path = getPath() ?: return false - mClient = SaveClient.getSardine(mContext, auth.username, auth.secret) + // SaveClient.get() with user/pass adds BasicAuthInterceptor and, when Tor is + // enabled in Prefs, routes all traffic through the SOCKS5 proxy automatically. + mClient = SaveClient.get( + context = mContext, + user = auth.username, + password = auth.secret, + forceCloseConnection = true, + allowHttp2 = false + ) + sanitize() val fileName = getUploadFileName(mEvidence) @@ -58,7 +82,7 @@ class WebDavConduit(evidence: Evidence, context: Context) : Conduit(evidence, co // Step 3: Upload media file (chunked for large files, single otherwise) AppLogger.i("Begin media file upload...") val uploadSuccess = if (mEvidence.contentLength > CHUNK_FILESIZE_THRESHOLD) { - uploadChunked(base, path, fileName) + uploadChunked(base, path, fileName, vault.username) } else { uploadSingle(base, path, fileName) } @@ -83,37 +107,27 @@ class WebDavConduit(evidence: Evidence, context: Context) : Conduit(evidence, co } override suspend fun createFolder(url: String) { - if (!mClient.exists(url)) { - mClient.createDirectory(url) + if (!headExists(url)) { + mkcol(url) } else { - AppLogger.i("folder already exists: ", url) + AppLogger.i("folder already exists: $url") } } private suspend fun uploadSingle(base: HttpUrl, path: List, fileName: String): Boolean { val fullPath = construct(base, path, fileName) - AppLogger.i("Uploading single file...", "filePath: $fullPath") + AppLogger.i("Uploading single file... 
$fullPath") try { - mClient.put( + val listener = createListener(cancellable = { !mCancelled }, onProgress = { jobProgress(it) }) + val requestBody = RequestBodyUtil.create( mContext.contentResolver, - fullPath, mEvidence.fileUri, mEvidence.contentLength, - mEvidence.mimeType, - false, - object : SardineListener { - var lastBytes: Long = 0 - - override fun transferred(bytes: Long) { - if (bytes > lastBytes) { - jobProgress(bytes) - lastBytes = bytes - } - } - - override fun continueUpload(): Boolean = !mCancelled - }) + mEvidence.mimeType.toMediaTypeOrNull(), + listener + ) + execute(Request.Builder().url(fullPath).put(requestBody).build()) } catch (e: Throwable) { jobFailed(e) return false @@ -124,35 +138,35 @@ class WebDavConduit(evidence: Evidence, context: Context) : Conduit(evidence, co } @Throws(IOException::class) - private suspend fun uploadChunked(base: HttpUrl, path: List, fileName: String): Boolean { + private suspend fun uploadChunked( + base: HttpUrl, + path: List, + fileName: String, + username: String + ): Boolean { AppLogger.i("Uploading started as chunked upload...") val vault = spaceRepository.getSpaceById(mEvidence.vaultId) ?: return false val url = vault.hostUrl ?: return false val tmpBase = HttpUrl.Builder() .scheme(url.scheme) - .username(url.username) - .password(url.password) .host(url.host) .port(url.port) - .query(url.query) - .fragment(url.fragment) .addPathSegment("remote.php") .addPathSegment("dav") .build() - val tmpPath = listOf("uploads", vault.username, fileName) + val tmpPath = listOf("uploads", username, fileName) return try { createFolders(tmpBase, tmpPath) - // Single PROPFIND to determine if we're resuming a previous upload attempt. - // If the temp folder already has chunks, scan forward to find where to resume. - // If it's a fresh upload, skip all per-chunk exists checks (N chunks × 1–2 RTTs saved). 
- val tempFolderPath = construct(tmpBase, tmpPath) + // One HEAD to detect resume: check if the temp folder and first chunk already exist. + // Fresh uploads skip all per-chunk existence checks (N × 1-2 RTT saved). + val firstChunkSize = minOf(CHUNK_SIZE, mEvidence.contentLength).toInt() var isResuming = try { - mClient.exists(tempFolderPath) && - mClient.list(tempFolderPath).size > 1 // >1: folder entry + at least one chunk + headExists(construct(tmpBase, tmpPath)) && + headExists(construct(tmpBase, tmpPath, "0-$firstChunkSize")) } catch (e: Throwable) { false } AppLogger.i(if (isResuming) "Resuming chunked upload..." else "Fresh chunked upload, skipping per-chunk existence checks") @@ -175,53 +189,42 @@ class WebDavConduit(evidence: Evidence, context: Context) : Conduit(evidence, co // Only check existence when resuming. Once we find the first missing chunk, // all subsequent chunks are also missing — stop scanning. if (isResuming) { - val chunkExists = mClient.exists(chunkPath) + val chunkExists = headExists(chunkPath) if (chunkExists) { - val dirList = mClient.list(chunkPath) - val sizeMatches = !dirList.isNullOrEmpty() && - dirList.first().contentLength == length.toLong() - if (sizeMatches) { + val remoteLen = headContentLength(chunkPath) + if (remoteLen == length.toLong()) { AppLogger.i("Resuming: chunk $offset-$total already present, skipping") offset = total continue } } else { - // First missing chunk — no more resume scanning needed from here isResuming = false } } - mClient.put( - chunkPath, - buffer, - mEvidence.mimeType, - object : SardineListener { - override fun transferred(bytes: Long) { - jobProgress(offset.toLong() + bytes) - } - - override fun continueUpload(): Boolean = !mCancelled - }) + val chunkBody = buffer.toRequestBody(mEvidence.mimeType.toMediaTypeOrNull()) + execute(Request.Builder().url(chunkPath).put(chunkBody).build()) + jobProgress(total.toLong()) - // Fix: offset = total (was total + 1, which skipped 1 byte per chunk boundary) 
offset = total } } if (mCancelled) throw Exception("Cancelled") - val dest = mutableListOf("files", vault.username) + val dest = mutableListOf("files", username) dest.addAll(path) - mClient.move(construct(tmpBase, tmpPath, ".file"), construct(tmpBase, dest, fileName)) + move(construct(tmpBase, tmpPath, ".file"), construct(tmpBase, dest, fileName)) mEvidence = mEvidence.copy(serverUrl = construct(base, path, fileName)) true } catch (e: Throwable) { // Clean up partial upload slot on server to avoid orphaned temp chunks try { - mClient.delete(construct(tmpBase, tmpPath)) - AppLogger.i("Cleaned up partial chunked upload at ${construct(tmpBase, tmpPath)}") + val tempFolder = construct(tmpBase, tmpPath) + webdavDelete(tempFolder) + AppLogger.i("Cleaned up partial chunked upload at $tempFolder") } catch (cleanupEx: Throwable) { AppLogger.w("Failed to clean up partial chunks: ${cleanupEx.message}") } @@ -231,30 +234,91 @@ class WebDavConduit(evidence: Evidence, context: Context) : Conduit(evidence, co } private suspend fun uploadMetadata(base: HttpUrl, path: List, fileName: String) { - AppLogger.i("Uploading metadata....") + AppLogger.i("Uploading metadata...") val metadata = getMetadata() if (mCancelled) throw Exception("Cancelled") - mClient.put( - construct(base, path, "$fileName.meta.json"), - metadata.toByteArray(), - "text/plain", - null + execute( + Request.Builder() + .url(construct(base, path, "$fileName.meta.json")) + .put(metadata.toRequestBody("text/plain".toMediaTypeOrNull())) + .build() ) val c2paManifest = getC2paManifest() if (c2paManifest != null) { if (mCancelled) throw Exception("Cancelled") - AppLogger.d("Uploading C2PA manifest: ${c2paManifest.name}") - mClient.put( - construct(base, path, c2paManifest.name), - c2paManifest, - "application/json", - false, - null + execute( + Request.Builder() + .url(construct(base, path, c2paManifest.name)) + .put(c2paManifest.readBytes().toRequestBody("application/json".toMediaTypeOrNull())) + .build() ) } } + + // 
--- WebDAV HTTP helpers --- + + private suspend fun headExists(url: String): Boolean = withContext(Dispatchers.IO) { + val response = mClient.newCall(Request.Builder().url(url).head().build()).execute() + val code = response.code + response.close() + code in 200..299 || code == 207 + } + + private suspend fun headContentLength(url: String): Long = withContext(Dispatchers.IO) { + val response = mClient.newCall(Request.Builder().url(url).head().build()).execute() + val len = response.header("Content-Length")?.toLongOrNull() ?: -1L + response.close() + len + } + + private suspend fun mkcol(url: String) = withContext(Dispatchers.IO) { + val response = mClient.newCall( + Request.Builder().url(url).method("MKCOL", null).build() + ).execute() + val code = response.code + response.close() + // 201 = created, 405 = already exists — both acceptable + if (code !in 200..299 && code != 405) { + throw IOException("MKCOL failed: $code for $url") + } + } + + private suspend fun move(sourceUrl: String, destinationUrl: String) = withContext(Dispatchers.IO) { + val response = mClient.newCall( + Request.Builder() + .url(sourceUrl) + .method("MOVE", null) + .header("Destination", destinationUrl) + .header("Overwrite", "T") + .build() + ).execute() + val code = response.code + val message = response.message + response.close() + if (code !in 200..299) throw IOException("MOVE failed: $code $message") + } + + private suspend fun webdavDelete(url: String) = withContext(Dispatchers.IO) { + val response = mClient.newCall(Request.Builder().url(url).delete().build()).execute() + response.close() + } + + @Throws(IOException::class) + private suspend fun execute(request: Request) { + val call = mClient.newCall(request) + currentCall = call + try { + val response = withContext(Dispatchers.IO) { call.execute() } + val code = response.code + val message = response.message + response.close() + if (code !in 200..299) throw IOException("$code: $message") + } finally { + currentCall = null + } + } } 
diff --git a/app/src/main/java/net/opendasharchive/openarchive/services/webdav/data/WebDavRepository.kt b/app/src/main/java/net/opendasharchive/openarchive/services/webdav/data/WebDavRepository.kt index dedd80b1..ba76b299 100644 --- a/app/src/main/java/net/opendasharchive/openarchive/services/webdav/data/WebDavRepository.kt +++ b/app/src/main/java/net/opendasharchive/openarchive/services/webdav/data/WebDavRepository.kt @@ -1,6 +1,7 @@ package net.opendasharchive.openarchive.services.webdav.data import android.content.Context +import android.util.Xml import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.withContext import net.opendasharchive.openarchive.core.domain.Vault @@ -9,7 +10,15 @@ import net.opendasharchive.openarchive.features.folders.Folder import net.opendasharchive.openarchive.services.SaveClient import net.opendasharchive.openarchive.util.DateUtils import net.opendasharchive.openarchive.util.toKotlinLocalDateTime +import okhttp3.MediaType.Companion.toMediaTypeOrNull +import okhttp3.Request +import okhttp3.RequestBody.Companion.toRequestBody +import org.xmlpull.v1.XmlPullParser import java.io.IOException +import java.io.InputStream +import java.text.SimpleDateFormat +import java.util.Date +import java.util.Locale class WebDavRepository( private val context: Context, @@ -19,14 +28,108 @@ class WebDavRepository( suspend fun getFolders(vault: Vault): List = withContext(Dispatchers.IO) { val auth = spaceRepository.getVaultAuth(vault.id) ?: throw IOException("Credentials unavailable for selected server") - val root = vault.hostUrl?.encodedPath + val root = vault.hostUrl?.encodedPath?.trimEnd('/') - SaveClient.getSardine(context, auth.username, auth.secret).list(vault.host)?.mapNotNull { - if (it?.isDirectory == true && it.path != root) { - Folder(it.name, it.modified?.toKotlinLocalDateTime() ?: DateUtils.nowDateTime) - } else { - null + val client = SaveClient.get( + context = context, + user = auth.username, + password = auth.secret, + 
forceCloseConnection = true, + allowHttp2 = false + ) + + val body = PROPFIND_BODY.toRequestBody("application/xml; charset=utf-8".toMediaTypeOrNull()) + val request = Request.Builder() + .url(vault.host) + .method("PROPFIND", body) + .header("Depth", "1") + .build() + + val response = client.newCall(request).execute() + if (response.code != 207) { + response.close() + throw IOException("PROPFIND failed: ${response.code} ${response.message}") + } + + val resources = response.use { r -> + r.body?.byteStream()?.let { parsePropfind(it) } ?: emptyList() + } + + resources.mapNotNull { resource -> + if (resource.isDirectory && resource.href.trimEnd('/') != root) { + Folder( + name = resource.name, + modified = resource.modified?.toKotlinLocalDateTime() ?: DateUtils.nowDateTime + ) + } else null + } + } + + // --- PROPFIND XML parser --- + + private data class PropfindResource( + val href: String, + val name: String, + val isDirectory: Boolean, + val modified: Date? + ) + + private fun parsePropfind(stream: InputStream): List { + val resources = mutableListOf() + val parser = Xml.newPullParser().apply { + setFeature(XmlPullParser.FEATURE_PROCESS_NAMESPACES, true) + setInput(stream, null) + } + + var href = "" + var displayName = "" + var isCollection = false + var lastModified: Date? 
= null + var captureTag = "" + + while (parser.eventType != XmlPullParser.END_DOCUMENT) { + when (parser.eventType) { + XmlPullParser.START_TAG -> when (parser.name) { + "response" -> { href = ""; displayName = ""; isCollection = false; lastModified = null; captureTag = "" } + "collection" -> isCollection = true + "href", + "displayname", + "getlastmodified" -> captureTag = parser.name + } + XmlPullParser.TEXT -> { + val text = parser.text?.trim().orEmpty() + if (text.isNotEmpty()) when (captureTag) { + "href" -> href = text + "displayname" -> displayName = text + "getlastmodified" -> lastModified = parseHttpDate(text) + } + } + XmlPullParser.END_TAG -> when (parser.name) { + "response" -> { + if (href.isNotBlank()) { + val name = displayName.ifBlank { + href.trimEnd('/').substringAfterLast('/') + } + resources.add(PropfindResource(href, name, isCollection, lastModified)) + } + captureTag = "" + } + "href", "displayname", "getlastmodified" -> captureTag = "" + } } - } ?: emptyList() + parser.next() + } + return resources + } + + private fun parseHttpDate(value: String): Date? 
= try { + HTTP_DATE_FORMAT.parse(value) + } catch (e: Exception) { null } + + companion object { + private val HTTP_DATE_FORMAT = SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z", Locale.US) + + private const val PROPFIND_BODY = + """<?xml version="1.0" encoding="utf-8"?><d:propfind xmlns:d="DAV:"><d:prop><d:displayname/><d:resourcetype/><d:getlastmodified/></d:prop></d:propfind>""" } } diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index eebd2562..65df9d86 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -1,89 +1,60 @@ [versions] activity = "1.13.0" -agp = "9.1.0" +agp = "9.1.1" androidx-exifinterface = "1.4.2" androidx-security-crypto = "1.1.0" androidx-test-runner = "1.7.0" appcompat = "1.7.1" biometric = "1.1.0" -bitcoinj-core = "0.16.2" -bouncycastle-bcpg = "1.71" -bouncycastle-bcpkix = "1.72" -bouncycastle-bcprov = "1.72" camerax = "1.6.0" clean-insights = "2.8.0" coil = "3.4.0" -compose = "1.11.0-beta02" +compose = "1.11.0" compose-preference = "2.2.0" -compose-stability = "0.7.0" -constraintlayout = "2.2.1" -constraintlayout-compos = "1.1.1" -coordinatorlayout = "1.3.0" core-ktx = "1.18.0" core-splashscreen = "1.2.0" -datastore = "1.3.0-alpha07" +datastore = "1.3.0-alpha08" coroutines = "1.10.2" detekt = "1.23.8" -detekt-compose = "0.5.6" +detekt-compose = "0.5.8" detekt-rules-compose = "1.4.0" dotsindicator = "5.1.0" -espresso-core = "3.5.1" firebase-analytics = "23.2.0" -firebase-crashlytics = "20.0.4" +firebase-crashlytics = "20.0.5" fragment = "1.8.9" -google-api-client-android = "1.26.0" -google-firebase-crashlytics = "3.0.6" -google-gms-auth = "21.4.0" -google-gms-google-services = "4.4.4" -google-http-client-gson = "1.42.3" google-play-app-update-ktx = "2.1.0" -google-play-asset-delivery-ktx = "2.3.0" -google-play-feature-delivery = "2.1.0" google-play-review = "2.0.2" -gson = "2.13.2" -guava = "31.0.1-jre" -guava-listenablefuture = "9999.0-empty-to-avoid-conflict-with-guava" -j2v8 = "6.2.1@aar" -jsoup = "1.17.2" +gson = "2.14.0" jtorctl = "0.4.5.7" junit = "4.13.2" junit-android = "1.3.0" -koin = "4.2.0" -koin-plugin = "0.4.1" -kotlin = "2.3.20" +koin
= "4.2.1" +kotlin = "2.3.21" kotlinx-collections-immutable = "0.4.0" kotlinx-datetime = "0.7.1" ksp = "2.3.6" lifecycle = "2.10.0" material = "1.13.0" -material3 = "1.5.0-alpha16" +material3 = "1.5.0-alpha18" material-adaptive = "1.2.0" -mlkit-barcode = "17.3.0" -media3 = "1.9.3" -mixpanel = "8.4.0" -navigation = "2.9.7" -navigation3 = "1.1.0-rc01" -navigation-events = "1.1.0-alpha01" +media3 = "1.10.0" +mixpanel = "8.6.0" +navigation = "2.9.8" +navigation3 = "1.1.1" +navigation-events = "1.1.0" accompanist = "0.37.3" -netcipher = "2.2.0-alpha" -okhttp = "4.12.0" -permissionx = "1.8.1" -picasso = "2.8" +okhttp = "5.3.2" preference = "1.2.1" recyclerview = "1.4.0" -simple-c2pa = "0.0.13" -jna = "5.13.0" -recyclerview-selection = "1.2.0" -reorderable = "3.0.0" +reorderable = "3.1.0" retrofit = "3.0.0" robolectric = "4.16.1" -room = "3.0.0-alpha02" +room = "3.0.0-alpha03" satyan-sugar = "1.5" -serialization = "1.10.0" +serialization = "1.11.0" swiperefreshlayout = "1.2.0" timber = "5.0.1" -tor-android = "0.4.9.5.1" -viewpager2 = "1.1.0" +tor-android = "0.4.9.6" work = "2.11.2" zxing-core = "3.5.4" zxing-android-embedded = "4.3.0" @@ -120,12 +91,7 @@ androidx-core-ktx = { group = "androidx.core", name = "core-ktx", version.ref = androidx-core-splashscreen = { group = "androidx.core", name = "core-splashscreen", version.ref = "core-splashscreen" } androidx-appcompat = { group = "androidx.appcompat", name = "appcompat", version.ref = "appcompat" } androidx-exifinterface = { group = "androidx.exifinterface", name = "exifinterface", version.ref = "androidx-exifinterface" } -androidx-constraintlayout = { group = "androidx.constraintlayout", name = "constraintlayout", version.ref = "constraintlayout" } -androidx-constraintlayout-compose = { group = "androidx.constraintlayout", name = "constraintlayout-compose", version.ref = "constraintlayout-compos" } -androidx-coordinatorlayout = { group = "androidx.coordinatorlayout", name = "coordinatorlayout", version.ref = 
"coordinatorlayout" } androidx-recyclerview = { group = "androidx.recyclerview", name = "recyclerview", version.ref = "recyclerview" } -androidx-recyclerview-selection = { group = "androidx.recyclerview", name = "recyclerview-selection", version.ref = "recyclerview-selection" } -androidx-viewpager2 = { group = "androidx.viewpager2", name = "viewpager2", version.ref = "viewpager2" } androidx-swiperefresh = { group = "androidx.swiperefreshlayout", name = "swiperefreshlayout", version.ref = "swiperefreshlayout" } # AndroidX - Fragment @@ -146,9 +112,7 @@ androidx-media3-ui = { group = "androidx.media3", name = "media3-ui", version.re # AndroidX - Navigation androidx-navigation-fragment = { group = "androidx.navigation", name = "navigation-fragment-ktx", version.ref = "navigation" } -androidx-navigation-ui = { group = "androidx.navigation", name = "navigation-ui-ktx", version.ref = "navigation" } androidx-navigation-compose = { group = "androidx.navigation", name = "navigation-compose", version.ref = "navigation" } -androidx-navigation-fragment-compose = { group = "androidx.navigation", name = "navigation-fragment-compose", version.ref = "navigation" } #AndroidX - Navigation 3 androidx-navigation3-runtime = { module = "androidx.navigation3:navigation3-runtime", version.ref = "navigation3" } @@ -164,7 +128,6 @@ androidx-work = { group = "androidx.work", name = "work-runtime-ktx", version.re # AndroidX - Room androidx-room3-runtime = { group = "androidx.room3", name = "room3-runtime", version.ref = "room" } -androidx-room3-ktx = { group = "androidx.room3", name = "room3-ktx", version.ref = "room" } androidx-room3-compiler = { group = "androidx.room3", name = "room3-compiler", version.ref = "room" } # AndroidX - Testing @@ -193,20 +156,13 @@ firebase-crashlytics = { group = "com.google.firebase", name = "firebase-crashly #google-drive-api = { group = "com.google.apis", name = "google-api-services-drive", version = "v3-rev136-1.25.0" } # Google -jsoup = { module = 
"org.jsoup:jsoup", version.ref = "jsoup" } -picasso = { module = "com.squareup.picasso:picasso", version.ref = "picasso" } zxing-core = { module = "com.google.zxing:core", version.ref = "zxing-core" } zxing-android-embedded = { module = "com.journeyapps:zxing-android-embedded", version.ref = "zxing-android-embedded" } # Google - Material Design google-material = { group = "com.google.android.material", name = "material", version.ref = "material" } -google-mlkit-barcode = { group = "com.google.mlkit", name = "barcode-scanning", version.ref = "mlkit-barcode" } # Google - Play Services -#google-auth = { group = "com.google.android.gms", name = "play-services-auth", version.ref = "google-gms-auth" } -#google-play-asset-delivery-ktx = { group = "com.google.android.play", name = "asset-delivery-ktx", version.ref = "google-play-asset-delivery-ktx" } -#google-play-feature-delivery = { group = "com.google.android.play", name = "feature-delivery", version.ref = "google-play-feature-delivery" } -#google-play-feature-delivery-ktx = { group = "com.google.android.play", name = "feature-delivery-ktx", version.ref = "google-play-feature-delivery" } google-play-review = { group = "com.google.android.play", name = "review", version.ref = "google-play-review" } google-play-review-ktx = { group = "com.google.android.play", name = "review-ktx", version.ref = "google-play-review" } google-play-app-update-ktx = { group = "com.google.android.play", name = "app-update-ktx", version.ref = "google-play-app-update-ktx" } @@ -225,9 +181,6 @@ koin-androidx-navigation = { group = "io.insert-koin", name = "koin-androidx-nav koin-compose = { group = "io.insert-koin", name = "koin-compose", version.ref = "koin" } koin-compose-viewmodel = { group = "io.insert-koin", name = "koin-compose-viewmodel", version.ref = "koin" } koin-compose-viewmodel-navigation = { group = "io.insert-koin", name = "koin-compose-viewmodel-navigation", version.ref = "koin" } -koin-compose-navigation3 = { group = 
"io.insert-koin", name = "koin-compose-navigation3", version.ref = "koin" } - -koin-annotations = { module = "io.insert-koin:koin-annotations", version.ref = "koin" } # Kotlin kotlinx-coroutines-android = { group = "org.jetbrains.kotlinx", name = "kotlinx-coroutines-android", version.ref = "coroutines" } @@ -239,14 +192,9 @@ kotlinx-datetime = { group = "org.jetbrains.kotlinx", name = "kotlinx-datetime", okhttp = { group = "com.squareup.okhttp3", name = "okhttp", version.ref = "okhttp" } okhttp-logging = { group = "com.squareup.okhttp3", name = "logging-interceptor", version.ref = "okhttp" } retrofit = { group = "com.squareup.retrofit2", name = "retrofit", version.ref = "retrofit" } -retrofit-gson = { group = "com.squareup.retrofit2", name = "converter-gson", version.ref = "retrofit" } retrofit-kotlinx-serialization = { group = "com.squareup.retrofit2", name = "converter-kotlinx-serialization", version.ref = "retrofit" } -guardianproject-sardine = { group = "com.github.guardianproject", name = "sardine-android", version = "89f7eae512" } # Security & Cryptography -bouncycastle-bcprov = { group = "org.bouncycastle", name = "bcprov-jdk15to18", version.ref = "bouncycastle-bcprov" } -bouncycastle-bcpkix = { group = "org.bouncycastle", name = "bcpkix-jdk15to18", version.ref = "bouncycastle-bcpkix" } -bouncycastle-bcpg = { group = "org.bouncycastle", name = "bcpg-jdk15to18", version.ref = "bouncycastle-bcpg" } # Testing junit = { group = "junit", name = "junit", version.ref = "junit" } @@ -254,19 +202,11 @@ robolectric = { group = "org.robolectric", name = "robolectric", version.ref = " work-testing = { group = "androidx.work", name = "work-testing", version.ref = "work" } # Third-Party Libraries -bitcoinj-core = { group = "org.bitcoinj", name = "bitcoinj-core", version.ref = "bitcoinj-core" } clean-insights = { group = "org.cleaninsights.sdk", name = "clean-insights-sdk", version.ref = "clean-insights" } dotsindicator = { group = "com.tbuonomo", name = "dotsindicator", 
version.ref = "dotsindicator" } gson = { group = "com.google.code.gson", name = "gson", version.ref = "gson" } -guava = { group = "com.google.guava", name = "guava", version.ref = "guava" } -guava-listenablefuture = { group = "com.google.guava", name = "listenablefuture", version.ref = "guava-listenablefuture" } -j2v8 = { group = "com.eclipsesource.j2v8", name = "j2v8", version.ref = "j2v8" } jtorctl = { group = "info.guardianproject", name = "jtorctl", version.ref = "jtorctl" } mixpanel = { group = "com.mixpanel.android", name = "mixpanel-android", version.ref = "mixpanel" } -netcipher = { group = "info.guardianproject.netcipher", name = "netcipher", version.ref = "netcipher" } -permissionx = { group = "com.guolindev.permissionx", name = "permissionx", version.ref = "permissionx" } -simple-c2pa = { group = "org.witness", name = "simple-c2pa", version.ref = "simple-c2pa" } -jna = { group = "net.java.dev.jna", name = "jna", version.ref = "jna" } satyan-sugar = { group = "com.github.satyan", name = "sugar", version.ref = "satyan-sugar" } timber = { group = "com.jakewharton.timber", name = "timber", version.ref = "timber" } tor-android = { group = "info.guardianproject", name = "tor-android", version.ref = "tor-android" } @@ -277,11 +217,6 @@ android-library = { id = "com.android.library", version.ref = "agp" } androidx-room3 = { id = "androidx.room3", version.ref = "room" } compose-compiler = { id = "org.jetbrains.kotlin.plugin.compose", version.ref = "kotlin" } detekt-plugin = { id = "io.gitlab.arturbosch.detekt", version.ref = "detekt" } -google-firebase-crashlytics = { id = "com.google.firebase.crashlytics", version.ref = "google-firebase-crashlytics" } -google-gms-google-services = { id = "com.google.gms.google-services", version.ref = "google-gms-google-services" } -koin-compiler = { id = "io.insert-koin.compiler.plugin", version.ref = "koin-plugin"} -kotlin-android = { id = "org.jetbrains.kotlin.android", version.ref = "kotlin" } kotlin-serialization = { id = 
"org.jetbrains.kotlin.plugin.serialization", version.ref = "kotlin" } ksp = { id = "com.google.devtools.ksp", version.ref = "ksp" } navigation-safeargs = { id = "androidx.navigation.safeargs.kotlin", version.ref = "navigation" } -stability-analyzer = { id = "com.github.skydoves.compose.stability.analyzer", version.ref = "compose-stability" } diff --git a/settings.gradle.kts b/settings.gradle.kts index fc9664b4..894d5ba4 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -85,7 +85,6 @@ dependencyResolutionManagement { content { includeModule("com.github.esafirm", "android-image-picker") includeModule("com.github.abdularis", "circularimageview") - includeModule("com.github.guardianproject", "sardine-android") } } }