
We (ATOM) are working on a construction technology platform that integrates with Autodesk APS to process and visualize site data. As part of our workflow, we use the APS Viewer to display DWG files with referenced orthophotos to our clients, enabling them to use the measurement tools to assess their project's execution precision and quality.
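
For context, we initialize the viewer and load the translated DWG roughly as follows (a simplified sketch: the access token plumbing, URN, and container ID are placeholders, not our production code):

    // Minimal viewer bootstrap; ACCESS_TOKEN and DWG_URN are placeholders.
    const options = {
        env: "AutodeskProduction2",
        api: "streamingV2",
        getAccessToken: (onTokenReady) => onTokenReady(ACCESS_TOKEN, 3600),
    };

    Autodesk.Viewing.Initializer(options, () => {
        const viewer = new Autodesk.Viewing.GuiViewer3D(document.getElementById("viewer"));
        viewer.start();
        Autodesk.Viewing.Document.load(
            `urn:${DWG_URN}`,
            (doc) => viewer.loadDocumentNode(doc, doc.getRoot().getDefaultGeometry()),
            (err) => console.error("Document load failed:", err)
        );
    });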

During the development of our platform, we encountered the following issues:

  1. Viewer Calibration: When a DWG file containing a referenced .tif orthophoto is uploaded to the viewer (as a ZIP archive), the calibration becomes corrupted: separate calibration settings are applied to the DWG content and the orthophoto, leading to discrepancies in measurements.

As shown in the attached image, measuring the same length on both the orthophoto and the DWG content yields different results, indicating a calibration inconsistency.

  2. Uploading Large ZIP Files Results in Timeout or 403 Errors: We are using the OssClient service to upload ZIP files to our APS bucket. The setup works reliably for small to medium files, but for files larger than 200 MB we consistently encounter timeout or 403 errors and need multiple retries before the upload succeeds. Are there specific size limits, timeout settings, or best practices we should follow to ensure stable large-file uploads? A simplified sketch of our upload path follows below.
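
Our upload path boils down to roughly this (simplified: bucket/object keys and the retry parameters are placeholders, and the retry wrapper is something we added while debugging, matching the behavior described above):

    import fs from "fs";

    // Simplified sketch of our upload path; `ossClient` is an OssClient
    // instance from @aps_sdk/oss. Bucket/object keys and the retry
    // parameters are placeholders, not our production values.
    async function uploadWithRetry(ossClient, accessToken, bucketKey, objectKey, filePath, maxRetries = 3) {
        const buffer = await fs.promises.readFile(filePath);
        for (let attempt = 1; attempt <= maxRetries; attempt++) {
            try {
                await ossClient.uploadObject(bucketKey, objectKey, buffer, { accessToken });
                return; // success
            } catch (error) {
                console.warn(`Upload attempt ${attempt} failed:`, error?.message ?? error);
                if (attempt === maxRetries) throw error;
                // Exponential backoff before retrying: 1 s, 2 s, 4 s, ...
                await new Promise((resolve) => setTimeout(resolve, 1000 * 2 ** (attempt - 1)));
            }
        }
    }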

Any insights or recommended solutions for these issues would be greatly appreciated.

1 Answer

  • It appears we have a problem with the Measure tool: the measured values are not consistent between snapping to geometry edges and using Free Measure to draw freely at an offset from a previously measured edge (a reproduction sketch follows after the upload example below).

  • I don't see a problem uploading large files; I tested uploads of up to 1 GB with the script below.

    import { config } from "dotenv";
    import fs from "fs";
    import path from "path";
    
    import {
        SdkManager, SdkManagerBuilder, StaticAuthenticationProvider
    } from "@aps_sdk/autodesk-sdkmanager";
    import { AuthenticationClient, Scopes } from "@aps_sdk/authentication";
    import {
        OssClient, Region, PolicyKey,
    } from "@aps_sdk/oss";
    
    // Load environment variables
    config();
    
    const APS_CLIENT_ID = process.env.APS_CLIENT_ID;
    const APS_CLIENT_SECRET = process.env.APS_CLIENT_SECRET;
    
    if (!APS_CLIENT_ID || !APS_CLIENT_SECRET) {
        console.error("APS_CLIENT_ID and APS_CLIENT_SECRET must be set in .env");
        process.exit(1);
    }
    
    const sdkManager = SdkManagerBuilder.create().build();
    const authenticationClient = new AuthenticationClient();
    
    /**
     * Fetches an access token using APS credentials.
     */
    async function getTwoLeggedToken() {
        try {
            const token = await authenticationClient.getTwoLeggedToken(
                APS_CLIENT_ID,
                APS_CLIENT_SECRET,
                [Scopes.DataRead, Scopes.DataCreate, Scopes.BucketCreate, Scopes.BucketRead, Scopes.BucketUpdate, Scopes.DataWrite]
            );
    
            console.log("Access Token acquired.");
            return token.access_token;
        } catch (error) {
            console.error("Error fetching access token:", error);
            throw error;
        }
    }
    
    const accessToken = await getTwoLeggedToken();
    const staticAuthenticationProvider = new StaticAuthenticationProvider(accessToken);
    
    // Ensure the bucket exists, or create it if it doesn't.
    
    async function ensureBucketExists(ossClient, bucketKey) {
        try {
            await ossClient.createBucket(
                Region.Us,
                { bucketKey, policyKey: PolicyKey.Transient },
                { accessToken }
            );
            console.log(`Bucket "${bucketKey}" created.`);
        } catch (error) {
            const status = error?.axiosError?.response?.status;
            if (status === 409) {
                console.log(`Bucket "${bucketKey}" already exists.`);
            } else {
                throw error;
            }
        }
    }
    
    async function uploadLargeFile(bucketKey, filePath) {
        try {
            const ossClient = new OssClient({ authenticationProvider: staticAuthenticationProvider });
    
            await ensureBucketExists(ossClient, bucketKey);
    
            const objectKey = path.basename(filePath);
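            // Note: this reads the whole file into memory; that was fine for
            // this 1 GB test, but streaming from disk may be preferable for
            // much larger files.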
            const fileBuffer = await fs.promises.readFile(filePath);
            const fileSize = (await fs.promises.stat(filePath)).size;
    
            console.log(`Uploading ${filePath} (${fileSize} bytes)...`);
            const abortController = new AbortController();
            abortController.signal.addEventListener("abort", () => console.log("Upload aborted."));
    
            await ossClient.uploadObject(bucketKey, objectKey, fileBuffer, {
                cancellationToken: abortController,
                requestIdPrefix: "UPLOAD_2025_03_25",
                accessToken,
                onProgress: (progress) => console.log(`Progress: ${progress.toFixed(2)}%`)
            });
    
            console.log(`File "${objectKey}" uploaded successfully.`);
        } catch (error) {
            console.error("Upload failed:", error);
        }
    }
    
    async function generateLargeFile(filePath, sizeInMB = 1024) {
        console.log(`Generating a ${sizeInMB}MB file...`);
    
        const buffer = Buffer.alloc(1024 * 1024, "A"); // 1MB buffer
        const writeStream = fs.createWriteStream(filePath);
    
        for (let i = 0; i < sizeInMB; i++) {
            if (!writeStream.write(buffer)) {
                await new Promise((resolve) => writeStream.once("drain", resolve)); // Handle backpressure
            }
        }
    
        writeStream.end();
        return new Promise((resolve, reject) => {
            writeStream.on("finish", () => {
                console.log("Synthetic file created.");
                resolve();
            });
            writeStream.on("error", reject);
        });
    }
    
    // Entry point
    (async () => {
        try {
            const bucketKey = "tmp25032025";
            const filePath = path.join(process.cwd(), "largeFile.dat");
    
            if (fs.existsSync(filePath)) {
                fs.unlinkSync(filePath);
                console.log(`Deleted existing file: ${filePath}`);
            }
    
            await generateLargeFile(filePath);
            await uploadLargeFile(bucketKey, filePath);
        } catch (error) {
            console.error("Unexpected error:", error);
        }
    })();
    
    
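Regarding the Measure tool issue above, the inconsistency can be reproduced programmatically by activating the extension after the model loads. A minimal sketch, assuming `viewer` is an already-initialized GuiViewer3D instance ("Autodesk.Measure" is the Viewer's built-in extension ID; the mode names come from its measure/calibration tools):

    // Load the built-in Measure extension once the model has loaded.
    // Assumes `viewer` is an initialized Autodesk.Viewing.GuiViewer3D.
    const measureExt = await viewer.loadExtension("Autodesk.Measure");

    // Snap-based distance measurement.
    measureExt.activate("distance");

    // Switch to calibration mode; after calibrating, re-measuring the same
    // edge with snapping vs. Free Measure is where the values diverge.
    measureExt.activate("calibrate");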